Importing the Libraries¶

In [ ]:
import pandas as pd
import numpy as np
import matplotlib as plt
import matplotlib.pyplot as plt
import seaborn
import scipy
import sys
import os
import pickle
import librosa
import librosa.display
from IPython.display import Audio
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
import tensorflow as tf
from tensorflow import keras
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive

Read the CSV file¶

In [ ]:
path = '/content/drive/Shareddrives/Project 4.2/Soft Comp Dataset/Data'
dataset = pd.read_csv(path+'/features_3_sec.csv',low_memory=False)
df = pd.DataFrame(dataset)
df
Out[ ]:
filename length chroma_stft_mean chroma_stft_var rms_mean rms_var spectral_centroid_mean spectral_centroid_var spectral_bandwidth_mean spectral_bandwidth_var ... mfcc16_var mfcc17_mean mfcc17_var mfcc18_mean mfcc18_var mfcc19_mean mfcc19_var mfcc20_mean mfcc20_var label
0 blues.00000.0.wav 66149 0.335406 0.091048 0.130405 0.003521 1773.065032 167541.630869 1972.744388 117335.771563 ... 39.687145 -3.241280 36.488243 0.722209 38.099152 -5.050335 33.618073 -0.243027 43.771767 blues
1 blues.00000.1.wav 66149 0.343065 0.086147 0.112699 0.001450 1816.693777 90525.690866 2010.051501 65671.875673 ... 64.748276 -6.055294 40.677654 0.159015 51.264091 -2.837699 97.030830 5.784063 59.943081 blues
2 blues.00000.2.wav 66149 0.346815 0.092243 0.132003 0.004620 1788.539719 111407.437613 2084.565132 75124.921716 ... 67.336563 -1.768610 28.348579 2.378768 45.717648 -1.938424 53.050835 2.517375 33.105122 blues
3 blues.00000.3.wav 66149 0.363639 0.086856 0.132565 0.002448 1655.289045 111952.284517 1960.039988 82913.639269 ... 47.739452 -3.841155 28.337118 1.218588 34.770935 -3.580352 50.836224 3.630866 32.023678 blues
4 blues.00000.4.wav 66149 0.335579 0.088129 0.143289 0.001701 1630.656199 79667.267654 1948.503884 60204.020268 ... 30.336359 0.664582 45.880913 1.689446 51.363583 -3.392489 26.738789 0.536961 29.146694 blues
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
9985 rock.00099.5.wav 66149 0.349126 0.080515 0.050019 0.000097 1499.083005 164266.886443 1718.707215 85931.574523 ... 42.485981 -9.094270 38.326839 -4.246976 31.049839 -5.625813 48.804092 1.818823 38.966969 rock
9986 rock.00099.6.wav 66149 0.372564 0.082626 0.057897 0.000088 1847.965128 281054.935973 1906.468492 99727.037054 ... 32.415203 -12.375726 66.418587 -3.081278 54.414265 -11.960546 63.452255 0.428857 18.697033 rock
9987 rock.00099.7.wav 66149 0.347481 0.089019 0.052403 0.000701 1346.157659 662956.246325 1561.859087 138762.841945 ... 78.228149 -2.524483 21.778994 4.809936 25.980829 1.775686 48.582378 -0.299545 41.586990 rock
9988 rock.00099.8.wav 66149 0.387527 0.084815 0.066430 0.000320 2084.515327 203891.039161 2018.366254 22860.992562 ... 28.323744 -5.363541 17.209942 6.462601 21.442928 2.354765 24.843613 0.675824 12.787750 rock
9989 rock.00099.9.wav 66149 0.369293 0.086759 0.050524 0.000067 1634.330126 411429.169769 1867.422378 119722.211518 ... 38.801735 -11.598399 58.983097 -0.178517 55.761299 -6.903252 39.485901 -3.412534 31.727489 rock

9990 rows × 60 columns

About the Dataset¶

In [ ]:
df.shape
Out[ ]:
(9990, 60)
In [ ]:
df.dtypes
Out[ ]:
filename                    object
length                       int64
chroma_stft_mean           float64
chroma_stft_var            float64
rms_mean                   float64
rms_var                    float64
spectral_centroid_mean     float64
spectral_centroid_var      float64
spectral_bandwidth_mean    float64
spectral_bandwidth_var     float64
rolloff_mean               float64
rolloff_var                float64
zero_crossing_rate_mean    float64
zero_crossing_rate_var     float64
harmony_mean               float64
harmony_var                float64
perceptr_mean              float64
perceptr_var               float64
tempo                      float64
mfcc1_mean                 float64
mfcc1_var                  float64
mfcc2_mean                 float64
mfcc2_var                  float64
mfcc3_mean                 float64
mfcc3_var                  float64
mfcc4_mean                 float64
mfcc4_var                  float64
mfcc5_mean                 float64
mfcc5_var                  float64
mfcc6_mean                 float64
mfcc6_var                  float64
mfcc7_mean                 float64
mfcc7_var                  float64
mfcc8_mean                 float64
mfcc8_var                  float64
mfcc9_mean                 float64
mfcc9_var                  float64
mfcc10_mean                float64
mfcc10_var                 float64
mfcc11_mean                float64
mfcc11_var                 float64
mfcc12_mean                float64
mfcc12_var                 float64
mfcc13_mean                float64
mfcc13_var                 float64
mfcc14_mean                float64
mfcc14_var                 float64
mfcc15_mean                float64
mfcc15_var                 float64
mfcc16_mean                float64
mfcc16_var                 float64
mfcc17_mean                float64
mfcc17_var                 float64
mfcc18_mean                float64
mfcc18_var                 float64
mfcc19_mean                float64
mfcc19_var                 float64
mfcc20_mean                float64
mfcc20_var                 float64
label                       object
dtype: object
In [ ]:
df=df.drop(labels='filename',axis=1)
In [ ]:
df
Out[ ]:
length chroma_stft_mean chroma_stft_var rms_mean rms_var spectral_centroid_mean spectral_centroid_var spectral_bandwidth_mean spectral_bandwidth_var rolloff_mean ... mfcc16_var mfcc17_mean mfcc17_var mfcc18_mean mfcc18_var mfcc19_mean mfcc19_var mfcc20_mean mfcc20_var label
0 66149 0.335406 0.091048 0.130405 0.003521 1773.065032 167541.630869 1972.744388 117335.771563 3714.560359 ... 39.687145 -3.241280 36.488243 0.722209 38.099152 -5.050335 33.618073 -0.243027 43.771767 blues
1 66149 0.343065 0.086147 0.112699 0.001450 1816.693777 90525.690866 2010.051501 65671.875673 3869.682242 ... 64.748276 -6.055294 40.677654 0.159015 51.264091 -2.837699 97.030830 5.784063 59.943081 blues
2 66149 0.346815 0.092243 0.132003 0.004620 1788.539719 111407.437613 2084.565132 75124.921716 3997.639160 ... 67.336563 -1.768610 28.348579 2.378768 45.717648 -1.938424 53.050835 2.517375 33.105122 blues
3 66149 0.363639 0.086856 0.132565 0.002448 1655.289045 111952.284517 1960.039988 82913.639269 3568.300218 ... 47.739452 -3.841155 28.337118 1.218588 34.770935 -3.580352 50.836224 3.630866 32.023678 blues
4 66149 0.335579 0.088129 0.143289 0.001701 1630.656199 79667.267654 1948.503884 60204.020268 3469.992864 ... 30.336359 0.664582 45.880913 1.689446 51.363583 -3.392489 26.738789 0.536961 29.146694 blues
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
9985 66149 0.349126 0.080515 0.050019 0.000097 1499.083005 164266.886443 1718.707215 85931.574523 3015.559458 ... 42.485981 -9.094270 38.326839 -4.246976 31.049839 -5.625813 48.804092 1.818823 38.966969 rock
9986 66149 0.372564 0.082626 0.057897 0.000088 1847.965128 281054.935973 1906.468492 99727.037054 3746.694524 ... 32.415203 -12.375726 66.418587 -3.081278 54.414265 -11.960546 63.452255 0.428857 18.697033 rock
9987 66149 0.347481 0.089019 0.052403 0.000701 1346.157659 662956.246325 1561.859087 138762.841945 2442.362154 ... 78.228149 -2.524483 21.778994 4.809936 25.980829 1.775686 48.582378 -0.299545 41.586990 rock
9988 66149 0.387527 0.084815 0.066430 0.000320 2084.515327 203891.039161 2018.366254 22860.992562 4313.266226 ... 28.323744 -5.363541 17.209942 6.462601 21.442928 2.354765 24.843613 0.675824 12.787750 rock
9989 66149 0.369293 0.086759 0.050524 0.000067 1634.330126 411429.169769 1867.422378 119722.211518 3462.042142 ... 38.801735 -11.598399 58.983097 -0.178517 55.761299 -6.903252 39.485901 -3.412534 31.727489 rock

9990 rows × 59 columns

In [ ]:
audio_recording="/content/drive/Shareddrives/Project 4.2/Soft Comp Dataset/Data/genres_original/country/country.00050.wav"
data,sr = librosa.load(audio_recording)
print(type(data),type(sr))
<class 'numpy.ndarray'> <class 'int'>
In [ ]:
librosa.load(audio_recording, sr= 45600)
Out[ ]:
(array([ 0.04582627,  0.06254332,  0.0583379 , ..., -0.13857861,
        -0.11823352, -0.05911855], dtype=float32), 45600)
In [ ]:
import IPython
IPython.display.Audio(data, rate=sr)
Out[ ]:
Your browser does not support the audio element.

Visualizing Audio Files¶

In [ ]:
plt.figure(figsize=(12,4))
librosa.display.waveplot(data, color="#2B4F72")
plt.show()
In [ ]:
stft = librosa.stft(data)
stft_db = librosa.amplitude_to_db(abs(stft))
#plt.show()
plt.figure(figsize=(14,6))
librosa.display.specshow(stft, sr=sr, x_axis='time',y_axis='hz')
plt.colorbar()
/usr/local/lib/python3.7/dist-packages/librosa/display.py:955: UserWarning: Trying to display complex-valued input. Showing magnitude instead.
  "Trying to display complex-valued input. " "Showing magnitude instead."
Out[ ]:
<matplotlib.colorbar.Colorbar at 0x7fc0067da290>
In [ ]:
stft = librosa.stft(data)
stft_db = librosa.amplitude_to_db(abs(stft))
#plt.show()
plt.figure(figsize=(14,6))
librosa.display.specshow(stft_db, sr=sr, x_axis='time',y_axis='hz')
plt.colorbar()
Out[ ]:
<matplotlib.colorbar.Colorbar at 0x7fc00671fd90>
In [ ]:
from sklearn.preprocessing import normalize
spectral_rolloff = librosa.feature.spectral_rolloff(data+0.01,sr=sr)[0]
plt.figure(figsize=(12,4))
#plt.show()
librosa.display.waveplot(data, sr= sr, alpha=0.4 , color ="#2B4F72")
Out[ ]:
<matplotlib.collections.PolyCollection at 0x7fc0066ac590>
In [ ]:
import librosa.display as lplt
chroma = librosa.feature.chroma_stft(data,sr=sr)
plt.figure(figsize=(16,6))
lplt.specshow(chroma, sr= sr,x_axis='time',y_axis='chroma',cmap='coolwarm')
plt.colorbar()
plt.title("Chroma Features")
plt.show()
In [ ]:
start = 1000
end = 1200
plt.figure(figsize=(14,5))
plt.plot(data[start:end],color="#2B4F72")
plt.grid()
In [ ]:
zero_cross_rate=librosa.zero_crossings(data[start:end],pad=False)
print("The number of zero-crossings is:",sum(zero_cross_rate))
The number of zero-crossings is: 8

Feature Extraction¶

In [ ]:
class_list=df.iloc[:,-1]
convertor=LabelEncoder()
In [ ]:
df.iloc[:,-1]
Out[ ]:
0       blues
1       blues
2       blues
3       blues
4       blues
        ...  
9985     rock
9986     rock
9987     rock
9988     rock
9989     rock
Name: label, Length: 9990, dtype: object
In [ ]:
y=convertor.fit_transform(class_list)
y
Out[ ]:
array([0, 0, 0, ..., 9, 9, 9])
In [ ]:
#data,sr=librosa.load(audio_recording)
#mfcc=librosa.feature.mfcc(data,sr=sr,hop_length=3,n_mfcc=13)
In [ ]:
print(df.iloc[:,:-1])
      length  chroma_stft_mean  chroma_stft_var  rms_mean   rms_var  \
0      66149          0.335406         0.091048  0.130405  0.003521   
1      66149          0.343065         0.086147  0.112699  0.001450   
2      66149          0.346815         0.092243  0.132003  0.004620   
3      66149          0.363639         0.086856  0.132565  0.002448   
4      66149          0.335579         0.088129  0.143289  0.001701   
...      ...               ...              ...       ...       ...   
9985   66149          0.349126         0.080515  0.050019  0.000097   
9986   66149          0.372564         0.082626  0.057897  0.000088   
9987   66149          0.347481         0.089019  0.052403  0.000701   
9988   66149          0.387527         0.084815  0.066430  0.000320   
9989   66149          0.369293         0.086759  0.050524  0.000067   

      spectral_centroid_mean  spectral_centroid_var  spectral_bandwidth_mean  \
0                1773.065032          167541.630869              1972.744388   
1                1816.693777           90525.690866              2010.051501   
2                1788.539719          111407.437613              2084.565132   
3                1655.289045          111952.284517              1960.039988   
4                1630.656199           79667.267654              1948.503884   
...                      ...                    ...                      ...   
9985             1499.083005          164266.886443              1718.707215   
9986             1847.965128          281054.935973              1906.468492   
9987             1346.157659          662956.246325              1561.859087   
9988             2084.515327          203891.039161              2018.366254   
9989             1634.330126          411429.169769              1867.422378   

      spectral_bandwidth_var  rolloff_mean  ...  mfcc16_mean  mfcc16_var  \
0              117335.771563   3714.560359  ...    -2.853603   39.687145   
1               65671.875673   3869.682242  ...     4.074709   64.748276   
2               75124.921716   3997.639160  ...     4.806280   67.336563   
3               82913.639269   3568.300218  ...    -1.359111   47.739452   
4               60204.020268   3469.992864  ...     2.092937   30.336359   
...                      ...           ...  ...          ...         ...   
9985            85931.574523   3015.559458  ...     5.773784   42.485981   
9986            99727.037054   3746.694524  ...     2.074155   32.415203   
9987           138762.841945   2442.362154  ...    -1.005473   78.228149   
9988            22860.992562   4313.266226  ...     4.123402   28.323744   
9989           119722.211518   3462.042142  ...     1.342274   38.801735   

      mfcc17_mean  mfcc17_var  mfcc18_mean  mfcc18_var  mfcc19_mean  \
0       -3.241280   36.488243     0.722209   38.099152    -5.050335   
1       -6.055294   40.677654     0.159015   51.264091    -2.837699   
2       -1.768610   28.348579     2.378768   45.717648    -1.938424   
3       -3.841155   28.337118     1.218588   34.770935    -3.580352   
4        0.664582   45.880913     1.689446   51.363583    -3.392489   
...           ...         ...          ...         ...          ...   
9985    -9.094270   38.326839    -4.246976   31.049839    -5.625813   
9986   -12.375726   66.418587    -3.081278   54.414265   -11.960546   
9987    -2.524483   21.778994     4.809936   25.980829     1.775686   
9988    -5.363541   17.209942     6.462601   21.442928     2.354765   
9989   -11.598399   58.983097    -0.178517   55.761299    -6.903252   

      mfcc19_var  mfcc20_mean  mfcc20_var  
0      33.618073    -0.243027   43.771767  
1      97.030830     5.784063   59.943081  
2      53.050835     2.517375   33.105122  
3      50.836224     3.630866   32.023678  
4      26.738789     0.536961   29.146694  
...          ...          ...         ...  
9985   48.804092     1.818823   38.966969  
9986   63.452255     0.428857   18.697033  
9987   48.582378    -0.299545   41.586990  
9988   24.843613     0.675824   12.787750  
9989   39.485901    -3.412534   31.727489  

[9990 rows x 58 columns]

Scaling the Features¶

In [ ]:
from sklearn.preprocessing import StandardScaler
fit = StandardScaler()
X =fit.fit_transform(np.array(df.iloc[:,:-1], dtype = float))

Dividing the Dataset¶

In [ ]:
X_train,X_test,y_train,y_test = train_test_split(X,y,test_size=0.33)
In [ ]:
X_train=X_train[...,np.newaxis]
X_test=X_test[...,np.newaxis]
In [ ]:
X_train
Out[ ]:
array([[[ 0.        ],
        [ 2.02045593],
        [-2.60195564],
        ...,
        [-0.96684407],
        [ 0.03161355],
        [-0.88858496]],

       [[ 0.        ],
        [ 0.37760869],
        [-1.85454356],
        ...,
        [-0.80325187],
        [ 0.08909764],
        [-0.97869112]],

       [[ 0.        ],
        [-0.41858148],
        [ 1.84020277],
        ...,
        [ 1.47498293],
        [-0.75692505],
        [ 0.01971337]],

       ...,

       [[ 0.        ],
        [ 0.66887745],
        [-0.65200896],
        ...,
        [ 0.08318575],
        [-0.43929117],
        [-0.59390196]],

       [[ 0.        ],
        [-0.08842626],
        [ 1.50654282],
        ...,
        [-0.76081947],
        [-0.57078258],
        [-0.63554285]],

       [[ 0.        ],
        [ 0.52164327],
        [ 0.30993474],
        ...,
        [ 0.8213119 ],
        [ 0.99231119],
        [-0.35442522]]])
In [ ]:
print(X_train.shape)
print(y_train.shape)
print(X_test.shape)
print(y_test.shape)
(6693, 58, 1)
(6693,)
(3297, 58, 1)
(3297,)
In [ ]:
len(y_train)
Out[ ]:
6693
In [ ]:
len(y_test)
Out[ ]:
3297
In [ ]:
from keras.models import Sequential 
def trainModel(model,epochs,optimizer):
  batch_size=128
  model.compile(optimizer=optimizer,
                loss='sparse_categorical_crossentropy',
                metrics=['accuracy']
  )
  return model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=epochs,
                   batch_size=batch_size)
In [ ]:
def plotValidate(history):
  print("Validation Accuracy",max(history.history["val_accuracy"]))
  pd.DataFrame(history.history).pyplot
  pd.DataFrame(history.history).plot(figsize=(12,6))
In [ ]:
from keras.layers import  Dense
model = keras.Sequential()
model.add(keras.layers.Dense(512,activation='relu',input_shape=(X_train.shape[1],)))
model.add(keras.layers.Dropout(0.2))
model.add(keras.layers.Dense(256,activation='relu'))
model.add(keras.layers.Dropout(0.2))
model.add(keras.layers.Dense(128,activation='relu'))
model.add(keras.layers.Dropout(0.2))
model.add(keras.layers.Dense(64,activation='relu'))
model.add(keras.layers.Dropout(0.2))
model.add(Dense(10,activation='softmax'))
print(model.summary())
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense (Dense)               (None, 512)               30208     
                                                                 
 dropout (Dropout)           (None, 512)               0         
                                                                 
 dense_1 (Dense)             (None, 256)               131328    
                                                                 
 dropout_1 (Dropout)         (None, 256)               0         
                                                                 
 dense_2 (Dense)             (None, 128)               32896     
                                                                 
 dropout_2 (Dropout)         (None, 128)               0         
                                                                 
 dense_3 (Dense)             (None, 64)                8256      
                                                                 
 dropout_3 (Dropout)         (None, 64)                0         
                                                                 
 dense_4 (Dense)             (None, 10)                650       
                                                                 
=================================================================
Total params: 203,338
Trainable params: 203,338
Non-trainable params: 0
_________________________________________________________________
None
In [ ]:
model_history=trainModel(model=model,epochs=60,optimizer='adam')
test_loss,test_acc= model.evaluate(X_test,y_test,batch_size=128)
print("The test Loss is:",test_loss)
print("\nThe Best test Accuracy is:",test_acc*100)
Epoch 1/60
53/53 [==============================] - 2s 16ms/step - loss: 1.6597 - accuracy: 0.4052 - val_loss: 1.1171 - val_accuracy: 0.6230
Epoch 2/60
53/53 [==============================] - 1s 11ms/step - loss: 1.1388 - accuracy: 0.6017 - val_loss: 0.8748 - val_accuracy: 0.7082
Epoch 3/60
53/53 [==============================] - 1s 12ms/step - loss: 0.9244 - accuracy: 0.6791 - val_loss: 0.7716 - val_accuracy: 0.7419
Epoch 4/60
53/53 [==============================] - 1s 10ms/step - loss: 0.7918 - accuracy: 0.7299 - val_loss: 0.7029 - val_accuracy: 0.7592
Epoch 5/60
53/53 [==============================] - 1s 12ms/step - loss: 0.7011 - accuracy: 0.7623 - val_loss: 0.6182 - val_accuracy: 0.7989
Epoch 6/60
53/53 [==============================] - 1s 11ms/step - loss: 0.6118 - accuracy: 0.7907 - val_loss: 0.5897 - val_accuracy: 0.8089
Epoch 7/60
53/53 [==============================] - 1s 10ms/step - loss: 0.5529 - accuracy: 0.8143 - val_loss: 0.5502 - val_accuracy: 0.8274
Epoch 8/60
53/53 [==============================] - 1s 10ms/step - loss: 0.4937 - accuracy: 0.8330 - val_loss: 0.5211 - val_accuracy: 0.8347
Epoch 9/60
53/53 [==============================] - 1s 11ms/step - loss: 0.4594 - accuracy: 0.8481 - val_loss: 0.5071 - val_accuracy: 0.8389
Epoch 10/60
53/53 [==============================] - 1s 10ms/step - loss: 0.4234 - accuracy: 0.8570 - val_loss: 0.4845 - val_accuracy: 0.8465
Epoch 11/60
53/53 [==============================] - 1s 10ms/step - loss: 0.3864 - accuracy: 0.8723 - val_loss: 0.4744 - val_accuracy: 0.8559
Epoch 12/60
53/53 [==============================] - 1s 10ms/step - loss: 0.3318 - accuracy: 0.8899 - val_loss: 0.4586 - val_accuracy: 0.8623
Epoch 13/60
53/53 [==============================] - 1s 12ms/step - loss: 0.3091 - accuracy: 0.8971 - val_loss: 0.4478 - val_accuracy: 0.8635
Epoch 14/60
53/53 [==============================] - 1s 12ms/step - loss: 0.2902 - accuracy: 0.9008 - val_loss: 0.4648 - val_accuracy: 0.8581
Epoch 15/60
53/53 [==============================] - 1s 11ms/step - loss: 0.2552 - accuracy: 0.9135 - val_loss: 0.4458 - val_accuracy: 0.8702
Epoch 16/60
53/53 [==============================] - 1s 10ms/step - loss: 0.2454 - accuracy: 0.9156 - val_loss: 0.4139 - val_accuracy: 0.8802
Epoch 17/60
53/53 [==============================] - 1s 12ms/step - loss: 0.2398 - accuracy: 0.9205 - val_loss: 0.4330 - val_accuracy: 0.8799
Epoch 18/60
53/53 [==============================] - 1s 10ms/step - loss: 0.2150 - accuracy: 0.9289 - val_loss: 0.4204 - val_accuracy: 0.8808
Epoch 19/60
53/53 [==============================] - 1s 12ms/step - loss: 0.2010 - accuracy: 0.9323 - val_loss: 0.3928 - val_accuracy: 0.8893
Epoch 20/60
53/53 [==============================] - 1s 13ms/step - loss: 0.1871 - accuracy: 0.9380 - val_loss: 0.4137 - val_accuracy: 0.8923
Epoch 21/60
53/53 [==============================] - 1s 10ms/step - loss: 0.1701 - accuracy: 0.9441 - val_loss: 0.4126 - val_accuracy: 0.8890
Epoch 22/60
53/53 [==============================] - 1s 10ms/step - loss: 0.1736 - accuracy: 0.9446 - val_loss: 0.4049 - val_accuracy: 0.8926
Epoch 23/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1502 - accuracy: 0.9510 - val_loss: 0.4252 - val_accuracy: 0.8902
Epoch 24/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1549 - accuracy: 0.9502 - val_loss: 0.4160 - val_accuracy: 0.8860
Epoch 25/60
53/53 [==============================] - 1s 10ms/step - loss: 0.1515 - accuracy: 0.9541 - val_loss: 0.3907 - val_accuracy: 0.9035
Epoch 26/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1530 - accuracy: 0.9505 - val_loss: 0.3912 - val_accuracy: 0.8993
Epoch 27/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1229 - accuracy: 0.9601 - val_loss: 0.4161 - val_accuracy: 0.8990
Epoch 28/60
53/53 [==============================] - 1s 10ms/step - loss: 0.1170 - accuracy: 0.9606 - val_loss: 0.4241 - val_accuracy: 0.8932
Epoch 29/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1191 - accuracy: 0.9634 - val_loss: 0.4112 - val_accuracy: 0.9020
Epoch 30/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1089 - accuracy: 0.9655 - val_loss: 0.4665 - val_accuracy: 0.8917
Epoch 31/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1137 - accuracy: 0.9603 - val_loss: 0.4509 - val_accuracy: 0.8978
Epoch 32/60
53/53 [==============================] - 1s 12ms/step - loss: 0.1036 - accuracy: 0.9638 - val_loss: 0.3938 - val_accuracy: 0.9042
Epoch 33/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0919 - accuracy: 0.9709 - val_loss: 0.3927 - val_accuracy: 0.9072
Epoch 34/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0789 - accuracy: 0.9749 - val_loss: 0.4309 - val_accuracy: 0.9011
Epoch 35/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0927 - accuracy: 0.9671 - val_loss: 0.3981 - val_accuracy: 0.9035
Epoch 36/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0911 - accuracy: 0.9721 - val_loss: 0.4424 - val_accuracy: 0.9011
Epoch 37/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0751 - accuracy: 0.9753 - val_loss: 0.4334 - val_accuracy: 0.9039
Epoch 38/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0775 - accuracy: 0.9742 - val_loss: 0.4342 - val_accuracy: 0.9029
Epoch 39/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0771 - accuracy: 0.9742 - val_loss: 0.4147 - val_accuracy: 0.9099
Epoch 40/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0756 - accuracy: 0.9764 - val_loss: 0.4106 - val_accuracy: 0.9054
Epoch 41/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0772 - accuracy: 0.9725 - val_loss: 0.4266 - val_accuracy: 0.9081
Epoch 42/60
53/53 [==============================] - 1s 11ms/step - loss: 0.0740 - accuracy: 0.9749 - val_loss: 0.4359 - val_accuracy: 0.9066
Epoch 43/60
53/53 [==============================] - 1s 11ms/step - loss: 0.0854 - accuracy: 0.9721 - val_loss: 0.4573 - val_accuracy: 0.8954
Epoch 44/60
53/53 [==============================] - 1s 11ms/step - loss: 0.0857 - accuracy: 0.9722 - val_loss: 0.4113 - val_accuracy: 0.9017
Epoch 45/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0686 - accuracy: 0.9780 - val_loss: 0.4350 - val_accuracy: 0.9048
Epoch 46/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0716 - accuracy: 0.9771 - val_loss: 0.4190 - val_accuracy: 0.9057
Epoch 47/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0628 - accuracy: 0.9794 - val_loss: 0.4270 - val_accuracy: 0.9084
Epoch 48/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0601 - accuracy: 0.9816 - val_loss: 0.4251 - val_accuracy: 0.9105
Epoch 49/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0603 - accuracy: 0.9797 - val_loss: 0.4482 - val_accuracy: 0.9045
Epoch 50/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0714 - accuracy: 0.9758 - val_loss: 0.4274 - val_accuracy: 0.9032
Epoch 51/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0555 - accuracy: 0.9815 - val_loss: 0.4438 - val_accuracy: 0.9039
Epoch 52/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0605 - accuracy: 0.9812 - val_loss: 0.4573 - val_accuracy: 0.9048
Epoch 53/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0594 - accuracy: 0.9822 - val_loss: 0.4368 - val_accuracy: 0.9069
Epoch 54/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0555 - accuracy: 0.9813 - val_loss: 0.4416 - val_accuracy: 0.9123
Epoch 55/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0548 - accuracy: 0.9824 - val_loss: 0.4331 - val_accuracy: 0.9090
Epoch 56/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0590 - accuracy: 0.9810 - val_loss: 0.4234 - val_accuracy: 0.9139
Epoch 57/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0590 - accuracy: 0.9816 - val_loss: 0.4478 - val_accuracy: 0.9105
Epoch 58/60
53/53 [==============================] - 1s 10ms/step - loss: 0.0597 - accuracy: 0.9809 - val_loss: 0.4552 - val_accuracy: 0.9042
Epoch 59/60
53/53 [==============================] - 1s 11ms/step - loss: 0.0535 - accuracy: 0.9828 - val_loss: 0.4356 - val_accuracy: 0.9102
Epoch 60/60
53/53 [==============================] - 1s 12ms/step - loss: 0.0543 - accuracy: 0.9830 - val_loss: 0.4655 - val_accuracy: 0.9051
26/26 [==============================] - 0s 3ms/step - loss: 0.4655 - accuracy: 0.9051
The test Loss is: 0.46553367376327515

The Best test Accuracy is: 90.50652384757996
In [ ]:
X.shape[1]
Out[ ]:
58
In [ ]:
y
Out[ ]:
array([0, 0, 0, ..., 9, 9, 9])

Prediction¶

In [ ]:
predictions = model.predict(X_test)
np.argmax(predictions[2])
Out[ ]:
3
In [ ]:
def make_prediction(model, X, y, idx):
    
    genre_dict = {
        0 : "blues",
        1 : "classical",
        2 : "country",
        3 : "disco",
        4 : "hiphop",
        5 : "jazz",
        6 : "metal",
        7 : "pop",
        8 : "reggae",
        9 : "rock",
        }
        
    predictions = model.predict(X)
    genre = np.argmax(predictions[idx])
    
    print("\n---Now testing the model for one audio file---\nThe model predicts: {}, and ground truth is: {}.\n".format(genre_dict[genre], genre_dict[y[idx]]))
In [ ]:
make_prediction(model, X_test, y_test,9)
---Now testing the model for one audio file---
The model predicts: metal, and ground truth is: metal.

In [ ]:
X_train.shape
Out[ ]:
(6693, 58, 1)

Building a RNN Model¶

In [ ]:
from keras.layers import Dense, Embedding, LSTM, SpatialDropout1D,Bidirectional


model1 = keras.Sequential()
model1.add(LSTM(256,return_sequences=True, recurrent_dropout=0.5,input_shape=(X_train.shape[1],1)))
model1.add(LSTM(128,return_sequences=True, recurrent_dropout=0.5))
model1.add(LSTM(64,recurrent_dropout=0.5))


model1.add(Dense(10,activation='softmax'))
opt = keras.optimizers.Adam(learning_rate=1e-2)
model1.compile(loss = 'sparse_categorical_crossentropy', optimizer=opt,metrics = ['accuracy'])
print(model1.summary())
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm (LSTM)                 (None, 58, 256)           264192    
                                                                 
 lstm_1 (LSTM)               (None, 58, 128)           197120    
                                                                 
 lstm_2 (LSTM)               (None, 64)                49408     
                                                                 
 dense_5 (Dense)             (None, 10)                650       
                                                                 
=================================================================
Total params: 511,370
Trainable params: 511,370
Non-trainable params: 0
_________________________________________________________________
None
In [ ]:
model_history=trainModel(model=model1,epochs=7,optimizer='adam')
Epoch 1/7
53/53 [==============================] - 75s 1s/step - loss: 1.9308 - accuracy: 0.2597 - val_loss: 1.7425 - val_accuracy: 0.3406
Epoch 2/7
53/53 [==============================] - 67s 1s/step - loss: 1.7187 - accuracy: 0.3478 - val_loss: 1.6716 - val_accuracy: 0.3685
Epoch 3/7
53/53 [==============================] - 67s 1s/step - loss: 1.6419 - accuracy: 0.3825 - val_loss: 1.6123 - val_accuracy: 0.4028
Epoch 4/7
53/53 [==============================] - 68s 1s/step - loss: 1.5918 - accuracy: 0.4068 - val_loss: 1.5783 - val_accuracy: 0.4177
Epoch 5/7
53/53 [==============================] - 67s 1s/step - loss: 1.5748 - accuracy: 0.4143 - val_loss: 1.5446 - val_accuracy: 0.4304
Epoch 6/7
53/53 [==============================] - 65s 1s/step - loss: 1.5215 - accuracy: 0.4403 - val_loss: 1.5093 - val_accuracy: 0.4331
Epoch 7/7
53/53 [==============================] - 65s 1s/step - loss: 1.5190 - accuracy: 0.4372 - val_loss: 1.4895 - val_accuracy: 0.4571
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
104/104 [==============================] - 0s 2ms/step - loss: 0.4655 - accuracy: 0.9051
In [ ]:
tset_loss
accuracy
Out[ ]:
0.9050652384757996
In [ ]:
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
The test Loss is: 0.465533584356308

The Best test Accuracy is: 90.50652384757996

Perforcmance Evaluation¶

In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs');
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model1.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict
In [ ]:
performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
/usr/local/lib/python3.7/dist-packages/sklearn/metrics/_classification.py:1318: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.
  _warn_prf(average, modifier, msg_start, len(result))
Out[ ]:
Accuracy     33.48
Precision    53.67
Recall       33.51
F1-Score     30.68
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')
In [ ]:

Experiment-2¶

In [ ]:
from keras.layers import Dense, Embedding, LSTM, SpatialDropout1D,Bidirectional


model1 = keras.Sequential()
model1.add(LSTM(256, recurrent_dropout=0.5,input_shape=(X_train.shape[1],1)))
#model1.add(LSTM(128,return_sequences=True, recurrent_dropout=0.5))
#model1.add(LSTM(64,recurrent_dropout=0.5))


model1.add(Dense(10,activation='softmax'))
opt = keras.optimizers.Adam(learning_rate=1e-2)
model1.compile(loss = 'sparse_categorical_crossentropy', optimizer=opt,metrics = ['accuracy'])
print(model1.summary())
Model: "sequential_2"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm_3 (LSTM)               (None, 256)               264192    
                                                                 
 dense_6 (Dense)             (None, 10)                2570      
                                                                 
=================================================================
Total params: 266,762
Trainable params: 266,762
Non-trainable params: 0
_________________________________________________________________
None
In [ ]:
model_history=trainModel(model=model1,epochs=7,optimizer='adam')
Epoch 1/7
53/53 [==============================] - 37s 668ms/step - loss: 1.9937 - accuracy: 0.2350 - val_loss: 1.9454 - val_accuracy: 0.2584
Epoch 2/7
53/53 [==============================] - 35s 666ms/step - loss: 1.7955 - accuracy: 0.3339 - val_loss: 1.7553 - val_accuracy: 0.3470
Epoch 3/7
53/53 [==============================] - 36s 671ms/step - loss: 1.6943 - accuracy: 0.3650 - val_loss: 1.6412 - val_accuracy: 0.4022
Epoch 4/7
53/53 [==============================] - 35s 666ms/step - loss: 1.6205 - accuracy: 0.3949 - val_loss: 1.6447 - val_accuracy: 0.3904
Epoch 5/7
53/53 [==============================] - 35s 669ms/step - loss: 1.5936 - accuracy: 0.4058 - val_loss: 1.5821 - val_accuracy: 0.4243
Epoch 6/7
53/53 [==============================] - 35s 657ms/step - loss: 1.5683 - accuracy: 0.4213 - val_loss: 1.5784 - val_accuracy: 0.4076
Epoch 7/7
53/53 [==============================] - 36s 674ms/step - loss: 1.5573 - accuracy: 0.4245 - val_loss: 1.5188 - val_accuracy: 0.4352
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 0s 2ms/step - loss: 0.4655 - accuracy: 0.9051
The test Loss is: 0.465533584356308

The Best test Accuracy is: 90.50652384757996
In [ ]:
model_history=trainModel(model=model1,epochs=20,optimizer='adam')
Epoch 1/20
53/53 [==============================] - 38s 682ms/step - loss: 1.5284 - accuracy: 0.4372 - val_loss: 1.4785 - val_accuracy: 0.4613
Epoch 2/20
53/53 [==============================] - 35s 667ms/step - loss: 1.5072 - accuracy: 0.4475 - val_loss: 1.5103 - val_accuracy: 0.4374
Epoch 3/20
53/53 [==============================] - 36s 671ms/step - loss: 1.4781 - accuracy: 0.4539 - val_loss: 1.4559 - val_accuracy: 0.4713
Epoch 4/20
53/53 [==============================] - 35s 658ms/step - loss: 1.4575 - accuracy: 0.4641 - val_loss: 1.4439 - val_accuracy: 0.4650
Epoch 5/20
53/53 [==============================] - 34s 640ms/step - loss: 1.4396 - accuracy: 0.4736 - val_loss: 1.4416 - val_accuracy: 0.4813
Epoch 6/20
53/53 [==============================] - 36s 674ms/step - loss: 1.4430 - accuracy: 0.4738 - val_loss: 1.4489 - val_accuracy: 0.4789
Epoch 7/20
53/53 [==============================] - 35s 670ms/step - loss: 1.4243 - accuracy: 0.4839 - val_loss: 1.3930 - val_accuracy: 0.5102
Epoch 8/20
53/53 [==============================] - 35s 670ms/step - loss: 1.4012 - accuracy: 0.4887 - val_loss: 1.4017 - val_accuracy: 0.4926
Epoch 9/20
53/53 [==============================] - 35s 667ms/step - loss: 1.3926 - accuracy: 0.4922 - val_loss: 1.3546 - val_accuracy: 0.5205
Epoch 10/20
53/53 [==============================] - 35s 667ms/step - loss: 1.3566 - accuracy: 0.5135 - val_loss: 1.3385 - val_accuracy: 0.5211
Epoch 11/20
53/53 [==============================] - 36s 672ms/step - loss: 1.3416 - accuracy: 0.5174 - val_loss: 1.3386 - val_accuracy: 0.5278
Epoch 12/20
53/53 [==============================] - 35s 669ms/step - loss: 1.3114 - accuracy: 0.5298 - val_loss: 1.3134 - val_accuracy: 0.5350
Epoch 13/20
53/53 [==============================] - 36s 672ms/step - loss: 1.3082 - accuracy: 0.5317 - val_loss: 1.3135 - val_accuracy: 0.5302
Epoch 14/20
53/53 [==============================] - 36s 674ms/step - loss: 1.2802 - accuracy: 0.5407 - val_loss: 1.3097 - val_accuracy: 0.5293
Epoch 15/20
53/53 [==============================] - 36s 671ms/step - loss: 1.2593 - accuracy: 0.5489 - val_loss: 1.2869 - val_accuracy: 0.5381
Epoch 16/20
53/53 [==============================] - 36s 676ms/step - loss: 1.2563 - accuracy: 0.5537 - val_loss: 1.2460 - val_accuracy: 0.5599
Epoch 17/20
53/53 [==============================] - 36s 672ms/step - loss: 1.2110 - accuracy: 0.5693 - val_loss: 1.2456 - val_accuracy: 0.5629
Epoch 18/20
53/53 [==============================] - 36s 686ms/step - loss: 1.2065 - accuracy: 0.5725 - val_loss: 1.2348 - val_accuracy: 0.5638
Epoch 19/20
53/53 [==============================] - 36s 672ms/step - loss: 1.1852 - accuracy: 0.5830 - val_loss: 1.2434 - val_accuracy: 0.5714
Epoch 20/20
53/53 [==============================] - 36s 682ms/step - loss: 1.1745 - accuracy: 0.5830 - val_loss: 1.2432 - val_accuracy: 0.5611
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 0s 2ms/step - loss: 0.4655 - accuracy: 0.9051
The test Loss is: 0.465533584356308

The Best test Accuracy is: 90.50652384757996
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs');
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model1.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     45.65
Precision    63.20
Recall       45.23
F1-Score     45.20
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')
In [ ]:
from keras.layers import Dense, Embedding, LSTM, SpatialDropout1D,Bidirectional


model = keras.Sequential()
model.add(LSTM(256, recurrent_dropout=0.5,input_shape=(X_train.shape[1],1)))
#model1.add(LSTM(128,return_sequences=True, recurrent_dropout=0.5))
#model1.add(LSTM(64,recurrent_dropout=0.5))


model.add(Dense(10,activation='softmax'))
opt = keras.optimizers.Adam(learning_rate=0.001)
model.compile(loss = 'sparse_categorical_crossentropy', optimizer=opt,metrics = ['accuracy'])
print(model.summary())
Model: "sequential_3"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm_4 (LSTM)               (None, 256)               264192    
                                                                 
 dense_7 (Dense)             (None, 10)                2570      
                                                                 
=================================================================
Total params: 266,762
Trainable params: 266,762
Non-trainable params: 0
_________________________________________________________________
None
In [ ]:
model_history=trainModel(model=model,epochs=20,optimizer='adam')
Epoch 1/20
53/53 [==============================] - 38s 688ms/step - loss: 1.9943 - accuracy: 0.2301 - val_loss: 1.8158 - val_accuracy: 0.3291
Epoch 2/20
53/53 [==============================] - 36s 678ms/step - loss: 1.7658 - accuracy: 0.3383 - val_loss: 1.8256 - val_accuracy: 0.3515
Epoch 3/20
53/53 [==============================] - 36s 673ms/step - loss: 1.6913 - accuracy: 0.3779 - val_loss: 1.7001 - val_accuracy: 0.3700
Epoch 4/20
53/53 [==============================] - 37s 697ms/step - loss: 1.6394 - accuracy: 0.3808 - val_loss: 1.5991 - val_accuracy: 0.4158
Epoch 5/20
53/53 [==============================] - 38s 717ms/step - loss: 1.5926 - accuracy: 0.4091 - val_loss: 1.5688 - val_accuracy: 0.4146
Epoch 6/20
53/53 [==============================] - 37s 698ms/step - loss: 1.5556 - accuracy: 0.4245 - val_loss: 1.5076 - val_accuracy: 0.4556
Epoch 7/20
53/53 [==============================] - 37s 694ms/step - loss: 1.5291 - accuracy: 0.4324 - val_loss: 1.5414 - val_accuracy: 0.4431
Epoch 8/20
53/53 [==============================] - 36s 689ms/step - loss: 1.5221 - accuracy: 0.4437 - val_loss: 1.4865 - val_accuracy: 0.4568
Epoch 9/20
53/53 [==============================] - 37s 692ms/step - loss: 1.5114 - accuracy: 0.4463 - val_loss: 1.4994 - val_accuracy: 0.4519
Epoch 10/20
53/53 [==============================] - 36s 680ms/step - loss: 1.4909 - accuracy: 0.4575 - val_loss: 1.5166 - val_accuracy: 0.4556
Epoch 11/20
53/53 [==============================] - 36s 677ms/step - loss: 1.4715 - accuracy: 0.4623 - val_loss: 1.4645 - val_accuracy: 0.4592
Epoch 12/20
53/53 [==============================] - 36s 674ms/step - loss: 1.4724 - accuracy: 0.4582 - val_loss: 1.4759 - val_accuracy: 0.4583
Epoch 13/20
53/53 [==============================] - 34s 650ms/step - loss: 1.4532 - accuracy: 0.4688 - val_loss: 1.3869 - val_accuracy: 0.5050
Epoch 14/20
53/53 [==============================] - 35s 666ms/step - loss: 1.4083 - accuracy: 0.4877 - val_loss: 1.3979 - val_accuracy: 0.4974
Epoch 15/20
53/53 [==============================] - 35s 653ms/step - loss: 1.3889 - accuracy: 0.5008 - val_loss: 1.3798 - val_accuracy: 0.5120
Epoch 16/20
53/53 [==============================] - 35s 655ms/step - loss: 1.3756 - accuracy: 0.4968 - val_loss: 1.3961 - val_accuracy: 0.4923
Epoch 17/20
53/53 [==============================] - 36s 689ms/step - loss: 1.3468 - accuracy: 0.5120 - val_loss: 1.3420 - val_accuracy: 0.5299
Epoch 18/20
53/53 [==============================] - 35s 656ms/step - loss: 1.3501 - accuracy: 0.5176 - val_loss: 1.3245 - val_accuracy: 0.5399
Epoch 19/20
53/53 [==============================] - 33s 630ms/step - loss: 1.3209 - accuracy: 0.5325 - val_loss: 1.2945 - val_accuracy: 0.5350
Epoch 20/20
53/53 [==============================] - 33s 623ms/step - loss: 1.2867 - accuracy: 0.5401 - val_loss: 1.3152 - val_accuracy: 0.5369
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 4s 38ms/step - loss: 1.3152 - accuracy: 0.5369
The test Loss is: 1.3152064085006714

The Best test Accuracy is: 53.6851704120636
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs');
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model1.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     45.65
Precision    63.20
Recall       45.23
F1-Score     45.20
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')

Experiment3¶

In [ ]:
from keras.layers import Dense, Embedding, LSTM, SpatialDropout1D,Bidirectional


model = keras.Sequential()
model.add(LSTM(256,recurrent_dropout=0.5,input_shape=(X_train.shape[1],1)))
#model.add(LSTM(128,return_sequences=True, recurrent_dropout=0.5))
#model.add(LSTM(64,recurrent_dropout=0.5))


model.add(Dense(10,activation='softmax'))
opt = keras.optimizers.Adam(learning_rate=1e-1)
model.compile(loss = 'sparse_categorical_crossentropy', optimizer=opt,metrics = ['accuracy'])
print(model.summary())
Model: "sequential_4"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm_5 (LSTM)               (None, 256)               264192    
                                                                 
 dense_8 (Dense)             (None, 10)                2570      
                                                                 
=================================================================
Total params: 266,762
Trainable params: 266,762
Non-trainable params: 0
_________________________________________________________________
None
In [ ]:
#Batch size-128
model_history=trainModel(model=model,epochs=40,optimizer='adam')
Epoch 1/40
53/53 [==============================] - 34s 617ms/step - loss: 2.0115 - accuracy: 0.2258 - val_loss: 1.8778 - val_accuracy: 0.2985
Epoch 2/40
53/53 [==============================] - 32s 612ms/step - loss: 1.7706 - accuracy: 0.3353 - val_loss: 1.7381 - val_accuracy: 0.3582
Epoch 3/40
53/53 [==============================] - 32s 614ms/step - loss: 1.6839 - accuracy: 0.3762 - val_loss: 1.6302 - val_accuracy: 0.3873
Epoch 4/40
53/53 [==============================] - 33s 617ms/step - loss: 1.6059 - accuracy: 0.3921 - val_loss: 1.5672 - val_accuracy: 0.4292
Epoch 5/40
53/53 [==============================] - 33s 620ms/step - loss: 1.5742 - accuracy: 0.4127 - val_loss: 1.5758 - val_accuracy: 0.4195
Epoch 6/40
53/53 [==============================] - 32s 613ms/step - loss: 1.5577 - accuracy: 0.4204 - val_loss: 1.5137 - val_accuracy: 0.4383
Epoch 7/40
53/53 [==============================] - 33s 614ms/step - loss: 1.5312 - accuracy: 0.4367 - val_loss: 1.4907 - val_accuracy: 0.4540
Epoch 8/40
53/53 [==============================] - 33s 614ms/step - loss: 1.4946 - accuracy: 0.4457 - val_loss: 1.4890 - val_accuracy: 0.4577
Epoch 9/40
53/53 [==============================] - 33s 623ms/step - loss: 1.5048 - accuracy: 0.4511 - val_loss: 1.5748 - val_accuracy: 0.3952
Epoch 10/40
53/53 [==============================] - 35s 665ms/step - loss: 1.4986 - accuracy: 0.4436 - val_loss: 1.5172 - val_accuracy: 0.4337
Epoch 11/40
53/53 [==============================] - 35s 659ms/step - loss: 1.4513 - accuracy: 0.4672 - val_loss: 1.4380 - val_accuracy: 0.4713
Epoch 12/40
53/53 [==============================] - 34s 651ms/step - loss: 1.4517 - accuracy: 0.4600 - val_loss: 1.4207 - val_accuracy: 0.4850
Epoch 13/40
53/53 [==============================] - 35s 662ms/step - loss: 1.4398 - accuracy: 0.4711 - val_loss: 1.4775 - val_accuracy: 0.4638
Epoch 14/40
53/53 [==============================] - 35s 660ms/step - loss: 1.4096 - accuracy: 0.4868 - val_loss: 1.3910 - val_accuracy: 0.4950
Epoch 15/40
53/53 [==============================] - 35s 659ms/step - loss: 1.3923 - accuracy: 0.4941 - val_loss: 1.3571 - val_accuracy: 0.5232
Epoch 16/40
53/53 [==============================] - 35s 657ms/step - loss: 1.3686 - accuracy: 0.5071 - val_loss: 1.3436 - val_accuracy: 0.5281
Epoch 17/40
53/53 [==============================] - 35s 656ms/step - loss: 1.3600 - accuracy: 0.5083 - val_loss: 1.3776 - val_accuracy: 0.5093
Epoch 18/40
53/53 [==============================] - 35s 659ms/step - loss: 1.3308 - accuracy: 0.5261 - val_loss: 1.3184 - val_accuracy: 0.5375
Epoch 19/40
53/53 [==============================] - 35s 666ms/step - loss: 1.3057 - accuracy: 0.5291 - val_loss: 1.2944 - val_accuracy: 0.5472
Epoch 20/40
53/53 [==============================] - 35s 662ms/step - loss: 1.2982 - accuracy: 0.5356 - val_loss: 1.3175 - val_accuracy: 0.5335
Epoch 21/40
53/53 [==============================] - 35s 666ms/step - loss: 1.2683 - accuracy: 0.5510 - val_loss: 1.3184 - val_accuracy: 0.5414
Epoch 22/40
53/53 [==============================] - 35s 665ms/step - loss: 1.2459 - accuracy: 0.5521 - val_loss: 1.2567 - val_accuracy: 0.5551
Epoch 23/40
53/53 [==============================] - 35s 662ms/step - loss: 1.2158 - accuracy: 0.5646 - val_loss: 1.2253 - val_accuracy: 0.5645
Epoch 24/40
53/53 [==============================] - 34s 649ms/step - loss: 1.1985 - accuracy: 0.5761 - val_loss: 1.1954 - val_accuracy: 0.5808
Epoch 25/40
53/53 [==============================] - 34s 650ms/step - loss: 1.1721 - accuracy: 0.5834 - val_loss: 1.2067 - val_accuracy: 0.5896
Epoch 26/40
53/53 [==============================] - 35s 653ms/step - loss: 1.1737 - accuracy: 0.5843 - val_loss: 1.1807 - val_accuracy: 0.5890
Epoch 27/40
53/53 [==============================] - 34s 648ms/step - loss: 1.1440 - accuracy: 0.5936 - val_loss: 1.2164 - val_accuracy: 0.5790
Epoch 28/40
53/53 [==============================] - 34s 635ms/step - loss: 1.1236 - accuracy: 0.6026 - val_loss: 1.1843 - val_accuracy: 0.5830
Epoch 29/40
53/53 [==============================] - 34s 651ms/step - loss: 1.1223 - accuracy: 0.6066 - val_loss: 1.1691 - val_accuracy: 0.5790
Epoch 30/40
53/53 [==============================] - 34s 646ms/step - loss: 1.1059 - accuracy: 0.6102 - val_loss: 1.1381 - val_accuracy: 0.6008
Epoch 31/40
53/53 [==============================] - 34s 652ms/step - loss: 1.0665 - accuracy: 0.6218 - val_loss: 1.1163 - val_accuracy: 0.6133
Epoch 32/40
53/53 [==============================] - 35s 660ms/step - loss: 1.0584 - accuracy: 0.6293 - val_loss: 1.1079 - val_accuracy: 0.6172
Epoch 33/40
53/53 [==============================] - 35s 659ms/step - loss: 1.0239 - accuracy: 0.6348 - val_loss: 1.1065 - val_accuracy: 0.6109
Epoch 34/40
53/53 [==============================] - 35s 654ms/step - loss: 1.0205 - accuracy: 0.6413 - val_loss: 1.0957 - val_accuracy: 0.6121
Epoch 35/40
53/53 [==============================] - 35s 653ms/step - loss: 1.0118 - accuracy: 0.6402 - val_loss: 1.1130 - val_accuracy: 0.6087
Epoch 36/40
53/53 [==============================] - 35s 659ms/step - loss: 1.0077 - accuracy: 0.6404 - val_loss: 1.1221 - val_accuracy: 0.6190
Epoch 37/40
53/53 [==============================] - 35s 658ms/step - loss: 0.9894 - accuracy: 0.6532 - val_loss: 1.0879 - val_accuracy: 0.6194
Epoch 38/40
53/53 [==============================] - 35s 661ms/step - loss: 0.9657 - accuracy: 0.6611 - val_loss: 1.0868 - val_accuracy: 0.6136
Epoch 39/40
53/53 [==============================] - 35s 656ms/step - loss: 0.9458 - accuracy: 0.6644 - val_loss: 1.0808 - val_accuracy: 0.6212
Epoch 40/40
53/53 [==============================] - 35s 658ms/step - loss: 0.9301 - accuracy: 0.6720 - val_loss: 1.0522 - val_accuracy: 0.6388
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 4s 39ms/step - loss: 1.0522 - accuracy: 0.6388
The test Loss is: 1.052200198173523

The Best test Accuracy is: 63.87625336647034
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs');
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     54.96
Precision    68.63
Recall       54.86
F1-Score     56.86
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')
In [ ]:

Experiment4¶

In [ ]:
from keras.models import Sequential 
def trainModel(model,epochs,optimizer):
  batch_size=220
  model.compile(optimizer=optimizer,
                loss='sparse_categorical_crossentropy',
                metrics=['accuracy']
  )
  return model.fit(X_train,y_train,validation_data=(X_test,y_test),epochs=epochs,
                   batch_size=batch_size)
In [ ]:
def plotValidate(history):
  print("Validation Accuracy",max(history.history["val_accuracy"]))
  pd.DataFrame(history.history).pyplot
  pd.DataFrame(history.history).plot(figsize=(12,6))
In [ ]:
from keras.layers import Dense, Embedding, LSTM, SpatialDropout1D,Bidirectional


model = keras.Sequential()
model.add(LSTM(256,recurrent_dropout=0.5,input_shape=(X_train.shape[1],1)))
#model.add(LSTM(128,return_sequences=True, recurrent_dropout=0.5))
#model.add(LSTM(64,recurrent_dropout=0.5))


model.add(Dense(10,activation='softmax'))
opt = keras.optimizers.Adam(learning_rate=1e-1)
model.compile(loss = 'sparse_categorical_crossentropy', optimizer=opt,metrics = ['accuracy'])
print(model.summary())
Model: "sequential_5"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 lstm_6 (LSTM)               (None, 256)               264192    
                                                                 
 dense_9 (Dense)             (None, 10)                2570      
                                                                 
=================================================================
Total params: 266,762
Trainable params: 266,762
Non-trainable params: 0
_________________________________________________________________
None
In [ ]:
#Batch size-220
model_history=trainModel(model=model,epochs=40,optimizer='adam')
Epoch 1/40
31/31 [==============================] - 28s 843ms/step - loss: 2.0608 - accuracy: 0.2174 - val_loss: 1.9506 - val_accuracy: 0.2417
Epoch 2/40
31/31 [==============================] - 26s 830ms/step - loss: 1.7998 - accuracy: 0.3206 - val_loss: 1.7131 - val_accuracy: 0.3676
Epoch 3/40
31/31 [==============================] - 25s 805ms/step - loss: 1.7039 - accuracy: 0.3664 - val_loss: 1.6898 - val_accuracy: 0.3588
Epoch 4/40
31/31 [==============================] - 25s 814ms/step - loss: 1.6533 - accuracy: 0.3794 - val_loss: 1.6108 - val_accuracy: 0.3995
Epoch 5/40
31/31 [==============================] - 25s 809ms/step - loss: 1.6231 - accuracy: 0.3970 - val_loss: 1.5913 - val_accuracy: 0.4110
Epoch 6/40
31/31 [==============================] - 24s 786ms/step - loss: 1.5950 - accuracy: 0.4121 - val_loss: 1.5956 - val_accuracy: 0.4070
Epoch 7/40
31/31 [==============================] - 24s 787ms/step - loss: 1.5641 - accuracy: 0.4212 - val_loss: 1.5487 - val_accuracy: 0.4161
Epoch 8/40
31/31 [==============================] - 24s 790ms/step - loss: 1.5614 - accuracy: 0.4192 - val_loss: 1.5294 - val_accuracy: 0.4252
Epoch 9/40
31/31 [==============================] - 25s 795ms/step - loss: 1.5361 - accuracy: 0.4318 - val_loss: 1.5478 - val_accuracy: 0.4359
Epoch 10/40
31/31 [==============================] - 25s 793ms/step - loss: 1.5144 - accuracy: 0.4402 - val_loss: 1.5175 - val_accuracy: 0.4516
Epoch 11/40
31/31 [==============================] - 25s 794ms/step - loss: 1.4938 - accuracy: 0.4433 - val_loss: 1.4983 - val_accuracy: 0.4468
Epoch 12/40
31/31 [==============================] - 24s 784ms/step - loss: 1.4649 - accuracy: 0.4612 - val_loss: 1.4996 - val_accuracy: 0.4547
Epoch 13/40
31/31 [==============================] - 24s 784ms/step - loss: 1.4578 - accuracy: 0.4687 - val_loss: 1.4852 - val_accuracy: 0.4562
Epoch 14/40
31/31 [==============================] - 24s 785ms/step - loss: 1.4522 - accuracy: 0.4699 - val_loss: 1.4276 - val_accuracy: 0.4786
Epoch 15/40
31/31 [==============================] - 24s 786ms/step - loss: 1.4441 - accuracy: 0.4684 - val_loss: 1.4719 - val_accuracy: 0.4589
Epoch 16/40
31/31 [==============================] - 24s 777ms/step - loss: 1.4536 - accuracy: 0.4709 - val_loss: 1.4547 - val_accuracy: 0.4716
Epoch 17/40
31/31 [==============================] - 24s 780ms/step - loss: 1.4184 - accuracy: 0.4793 - val_loss: 1.4606 - val_accuracy: 0.4689
Epoch 18/40
31/31 [==============================] - 24s 785ms/step - loss: 1.4078 - accuracy: 0.4884 - val_loss: 1.4597 - val_accuracy: 0.4704
Epoch 19/40
31/31 [==============================] - 24s 785ms/step - loss: 1.4183 - accuracy: 0.4832 - val_loss: 1.3937 - val_accuracy: 0.4947
Epoch 20/40
31/31 [==============================] - 24s 774ms/step - loss: 1.3900 - accuracy: 0.4960 - val_loss: 1.3608 - val_accuracy: 0.5184
Epoch 21/40
31/31 [==============================] - 24s 780ms/step - loss: 1.3816 - accuracy: 0.5026 - val_loss: 1.3971 - val_accuracy: 0.5050
Epoch 22/40
31/31 [==============================] - 24s 780ms/step - loss: 1.3696 - accuracy: 0.5016 - val_loss: 1.3399 - val_accuracy: 0.5314
Epoch 23/40
31/31 [==============================] - 24s 770ms/step - loss: 1.3369 - accuracy: 0.5190 - val_loss: 1.3767 - val_accuracy: 0.5044
Epoch 24/40
31/31 [==============================] - 24s 775ms/step - loss: 1.3259 - accuracy: 0.5213 - val_loss: 1.3559 - val_accuracy: 0.5168
Epoch 25/40
31/31 [==============================] - 24s 775ms/step - loss: 1.3012 - accuracy: 0.5355 - val_loss: 1.2965 - val_accuracy: 0.5387
Epoch 26/40
31/31 [==============================] - 24s 772ms/step - loss: 1.2670 - accuracy: 0.5474 - val_loss: 1.2785 - val_accuracy: 0.5544
Epoch 27/40
31/31 [==============================] - 24s 783ms/step - loss: 1.2676 - accuracy: 0.5476 - val_loss: 1.2908 - val_accuracy: 0.5478
Epoch 28/40
31/31 [==============================] - 24s 784ms/step - loss: 1.2655 - accuracy: 0.5483 - val_loss: 1.2535 - val_accuracy: 0.5541
Epoch 29/40
31/31 [==============================] - 25s 799ms/step - loss: 1.2155 - accuracy: 0.5694 - val_loss: 1.2504 - val_accuracy: 0.5538
Epoch 30/40
31/31 [==============================] - 24s 789ms/step - loss: 1.2234 - accuracy: 0.5592 - val_loss: 1.2435 - val_accuracy: 0.5578
Epoch 31/40
31/31 [==============================] - 24s 792ms/step - loss: 1.1979 - accuracy: 0.5761 - val_loss: 1.2285 - val_accuracy: 0.5696
Epoch 32/40
31/31 [==============================] - 24s 785ms/step - loss: 1.1872 - accuracy: 0.5778 - val_loss: 1.2649 - val_accuracy: 0.5569
Epoch 33/40
31/31 [==============================] - 24s 784ms/step - loss: 1.1804 - accuracy: 0.5791 - val_loss: 1.2039 - val_accuracy: 0.5769
Epoch 34/40
31/31 [==============================] - 25s 795ms/step - loss: 1.1570 - accuracy: 0.5915 - val_loss: 1.2016 - val_accuracy: 0.5702
Epoch 35/40
31/31 [==============================] - 25s 799ms/step - loss: 1.1414 - accuracy: 0.5955 - val_loss: 1.2332 - val_accuracy: 0.5760
Epoch 36/40
31/31 [==============================] - 24s 779ms/step - loss: 1.1331 - accuracy: 0.5967 - val_loss: 1.2252 - val_accuracy: 0.5672
Epoch 37/40
31/31 [==============================] - 24s 787ms/step - loss: 1.1154 - accuracy: 0.6032 - val_loss: 1.1731 - val_accuracy: 0.5966
Epoch 38/40
31/31 [==============================] - 24s 788ms/step - loss: 1.1051 - accuracy: 0.6087 - val_loss: 1.1683 - val_accuracy: 0.5969
Epoch 39/40
31/31 [==============================] - 24s 786ms/step - loss: 1.0865 - accuracy: 0.6147 - val_loss: 1.1441 - val_accuracy: 0.5963
Epoch 40/40
31/31 [==============================] - 25s 793ms/step - loss: 1.0806 - accuracy: 0.6196 - val_loss: 1.1333 - val_accuracy: 0.5969
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 4s 36ms/step - loss: 1.1333 - accuracy: 0.5969
The test Loss is: 1.1333277225494385

The Best test Accuracy is: 59.6906304359436
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs');
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     50.05
Precision    66.29
Recall       49.86
F1-Score     51.99
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')

Experiment5¶

In [ ]:
#Batch size-220
model_history=trainModel(model=model,epochs=50,optimizer='adam')
Epoch 1/50
31/31 [==============================] - 27s 803ms/step - loss: 1.0988 - accuracy: 0.6112 - val_loss: 1.1494 - val_accuracy: 0.5924
Epoch 2/50
31/31 [==============================] - 24s 787ms/step - loss: 1.0557 - accuracy: 0.6308 - val_loss: 1.1189 - val_accuracy: 0.6178
Epoch 3/50
31/31 [==============================] - 24s 787ms/step - loss: 1.0486 - accuracy: 0.6259 - val_loss: 1.1426 - val_accuracy: 0.6027
Epoch 4/50
31/31 [==============================] - 25s 793ms/step - loss: 1.0184 - accuracy: 0.6432 - val_loss: 1.0963 - val_accuracy: 0.6245
Epoch 5/50
31/31 [==============================] - 24s 782ms/step - loss: 1.0052 - accuracy: 0.6446 - val_loss: 1.0982 - val_accuracy: 0.6142
Epoch 6/50
31/31 [==============================] - 24s 782ms/step - loss: 0.9968 - accuracy: 0.6468 - val_loss: 1.1105 - val_accuracy: 0.6178
Epoch 7/50
31/31 [==============================] - 24s 790ms/step - loss: 0.9855 - accuracy: 0.6532 - val_loss: 1.1305 - val_accuracy: 0.6069
Epoch 8/50
31/31 [==============================] - 24s 785ms/step - loss: 0.9954 - accuracy: 0.6472 - val_loss: 1.0889 - val_accuracy: 0.6181
Epoch 9/50
31/31 [==============================] - 24s 789ms/step - loss: 0.9785 - accuracy: 0.6523 - val_loss: 1.0626 - val_accuracy: 0.6351
Epoch 10/50
31/31 [==============================] - 24s 785ms/step - loss: 0.9502 - accuracy: 0.6574 - val_loss: 1.0801 - val_accuracy: 0.6245
Epoch 11/50
31/31 [==============================] - 24s 790ms/step - loss: 0.9400 - accuracy: 0.6662 - val_loss: 1.0774 - val_accuracy: 0.6303
Epoch 12/50
31/31 [==============================] - 24s 787ms/step - loss: 0.9178 - accuracy: 0.6786 - val_loss: 1.0545 - val_accuracy: 0.6439
Epoch 13/50
31/31 [==============================] - 24s 785ms/step - loss: 0.9191 - accuracy: 0.6780 - val_loss: 1.0486 - val_accuracy: 0.6412
Epoch 14/50
31/31 [==============================] - 24s 788ms/step - loss: 0.8947 - accuracy: 0.6812 - val_loss: 1.0565 - val_accuracy: 0.6412
Epoch 15/50
31/31 [==============================] - 24s 780ms/step - loss: 0.8828 - accuracy: 0.6910 - val_loss: 1.0579 - val_accuracy: 0.6409
Epoch 16/50
31/31 [==============================] - 24s 780ms/step - loss: 0.8800 - accuracy: 0.6840 - val_loss: 1.0443 - val_accuracy: 0.6463
Epoch 17/50
31/31 [==============================] - 24s 791ms/step - loss: 0.8802 - accuracy: 0.6855 - val_loss: 1.0461 - val_accuracy: 0.6518
Epoch 18/50
31/31 [==============================] - 24s 783ms/step - loss: 0.8640 - accuracy: 0.6967 - val_loss: 1.0409 - val_accuracy: 0.6345
Epoch 19/50
31/31 [==============================] - 24s 784ms/step - loss: 0.8671 - accuracy: 0.6939 - val_loss: 1.0072 - val_accuracy: 0.6591
Epoch 20/50
31/31 [==============================] - 24s 786ms/step - loss: 0.8203 - accuracy: 0.7095 - val_loss: 1.0269 - val_accuracy: 0.6573
Epoch 21/50
31/31 [==============================] - 24s 783ms/step - loss: 0.8271 - accuracy: 0.7028 - val_loss: 1.0329 - val_accuracy: 0.6448
Epoch 22/50
31/31 [==============================] - 24s 784ms/step - loss: 0.7969 - accuracy: 0.7200 - val_loss: 1.0152 - val_accuracy: 0.6645
Epoch 23/50
31/31 [==============================] - 24s 778ms/step - loss: 0.8064 - accuracy: 0.7110 - val_loss: 1.0190 - val_accuracy: 0.6515
Epoch 24/50
31/31 [==============================] - 24s 778ms/step - loss: 0.7996 - accuracy: 0.7176 - val_loss: 1.0212 - val_accuracy: 0.6533
Epoch 25/50
31/31 [==============================] - 24s 771ms/step - loss: 0.7976 - accuracy: 0.7172 - val_loss: 1.0274 - val_accuracy: 0.6488
Epoch 26/50
31/31 [==============================] - 24s 772ms/step - loss: 0.7552 - accuracy: 0.7354 - val_loss: 0.9848 - val_accuracy: 0.6670
Epoch 27/50
31/31 [==============================] - 24s 782ms/step - loss: 0.7558 - accuracy: 0.7369 - val_loss: 1.0189 - val_accuracy: 0.6600
Epoch 28/50
31/31 [==============================] - 24s 772ms/step - loss: 0.7675 - accuracy: 0.7273 - val_loss: 1.0059 - val_accuracy: 0.6624
Epoch 29/50
31/31 [==============================] - 24s 774ms/step - loss: 0.7617 - accuracy: 0.7288 - val_loss: 1.0649 - val_accuracy: 0.6448
Epoch 30/50
31/31 [==============================] - 24s 768ms/step - loss: 0.7551 - accuracy: 0.7314 - val_loss: 1.0064 - val_accuracy: 0.6576
Epoch 31/50
31/31 [==============================] - 24s 768ms/step - loss: 0.7316 - accuracy: 0.7418 - val_loss: 1.0071 - val_accuracy: 0.6715
Epoch 32/50
31/31 [==============================] - 24s 762ms/step - loss: 0.7229 - accuracy: 0.7456 - val_loss: 0.9854 - val_accuracy: 0.6749
Epoch 33/50
31/31 [==============================] - 24s 765ms/step - loss: 0.6967 - accuracy: 0.7526 - val_loss: 0.9722 - val_accuracy: 0.6843
Epoch 34/50
31/31 [==============================] - 24s 777ms/step - loss: 0.6967 - accuracy: 0.7521 - val_loss: 0.9777 - val_accuracy: 0.6770
Epoch 35/50
31/31 [==============================] - 24s 777ms/step - loss: 0.6809 - accuracy: 0.7547 - val_loss: 1.0037 - val_accuracy: 0.6727
Epoch 36/50
31/31 [==============================] - 24s 770ms/step - loss: 0.6756 - accuracy: 0.7575 - val_loss: 0.9888 - val_accuracy: 0.6861
Epoch 37/50
31/31 [==============================] - 24s 773ms/step - loss: 0.6682 - accuracy: 0.7657 - val_loss: 0.9984 - val_accuracy: 0.6618
Epoch 38/50
31/31 [==============================] - 24s 773ms/step - loss: 0.6763 - accuracy: 0.7595 - val_loss: 0.9863 - val_accuracy: 0.6785
Epoch 39/50
31/31 [==============================] - 24s 769ms/step - loss: 0.6568 - accuracy: 0.7708 - val_loss: 0.9877 - val_accuracy: 0.6773
Epoch 40/50
31/31 [==============================] - 24s 780ms/step - loss: 0.6301 - accuracy: 0.7777 - val_loss: 0.9940 - val_accuracy: 0.6824
Epoch 41/50
31/31 [==============================] - 24s 772ms/step - loss: 0.6410 - accuracy: 0.7690 - val_loss: 0.9749 - val_accuracy: 0.6809
Epoch 42/50
31/31 [==============================] - 24s 780ms/step - loss: 0.6143 - accuracy: 0.7838 - val_loss: 0.9968 - val_accuracy: 0.6846
Epoch 43/50
31/31 [==============================] - 24s 779ms/step - loss: 0.6279 - accuracy: 0.7765 - val_loss: 0.9738 - val_accuracy: 0.6900
Epoch 44/50
31/31 [==============================] - 25s 798ms/step - loss: 0.6138 - accuracy: 0.7813 - val_loss: 1.0264 - val_accuracy: 0.6655
Epoch 45/50
31/31 [==============================] - 24s 777ms/step - loss: 0.5951 - accuracy: 0.7899 - val_loss: 1.0052 - val_accuracy: 0.6840
Epoch 46/50
31/31 [==============================] - 24s 789ms/step - loss: 0.6033 - accuracy: 0.7874 - val_loss: 0.9919 - val_accuracy: 0.6873
Epoch 47/50
31/31 [==============================] - 25s 794ms/step - loss: 0.5840 - accuracy: 0.7959 - val_loss: 0.9564 - val_accuracy: 0.6931
Epoch 48/50
31/31 [==============================] - 25s 800ms/step - loss: 0.5840 - accuracy: 0.7926 - val_loss: 0.9842 - val_accuracy: 0.6870
Epoch 49/50
31/31 [==============================] - 24s 788ms/step - loss: 0.5678 - accuracy: 0.8007 - val_loss: 0.9822 - val_accuracy: 0.6849
Epoch 50/50
31/31 [==============================] - 24s 793ms/step - loss: 0.5489 - accuracy: 0.8052 - val_loss: 0.9690 - val_accuracy: 0.6958
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 4s 36ms/step - loss: 0.9690 - accuracy: 0.6958
The test Loss is: 0.9690216779708862

The Best test Accuracy is: 69.57840323448181
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs')
Out[ ]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fbfffa206d0>
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     65.97
Precision    71.12
Recall       65.85
F1-Score     66.63
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')
In [ ]:

In [ ]:

In [ ]:

In [ ]:

Experiment6¶

In [ ]:
#Batch size-220
model_history=trainModel(model=model,epochs=90,optimizer='adam')
Epoch 1/90
31/31 [==============================] - 27s 799ms/step - loss: 0.5880 - accuracy: 0.7866 - val_loss: 0.9894 - val_accuracy: 0.6888
Epoch 2/90
31/31 [==============================] - 24s 789ms/step - loss: 0.5572 - accuracy: 0.7999 - val_loss: 0.9805 - val_accuracy: 0.6900
Epoch 3/90
31/31 [==============================] - 25s 802ms/step - loss: 0.5385 - accuracy: 0.8070 - val_loss: 0.9730 - val_accuracy: 0.6924
Epoch 4/90
31/31 [==============================] - 25s 793ms/step - loss: 0.5220 - accuracy: 0.8173 - val_loss: 0.9625 - val_accuracy: 0.7022
Epoch 5/90
31/31 [==============================] - 24s 791ms/step - loss: 0.5130 - accuracy: 0.8200 - val_loss: 0.9655 - val_accuracy: 0.7100
Epoch 6/90
31/31 [==============================] - 24s 787ms/step - loss: 0.5001 - accuracy: 0.8224 - val_loss: 0.9762 - val_accuracy: 0.7058
Epoch 7/90
31/31 [==============================] - 24s 790ms/step - loss: 0.5054 - accuracy: 0.8253 - val_loss: 0.9985 - val_accuracy: 0.6943
Epoch 8/90
31/31 [==============================] - 24s 785ms/step - loss: 0.4923 - accuracy: 0.8283 - val_loss: 1.0149 - val_accuracy: 0.6833
Epoch 9/90
31/31 [==============================] - 24s 781ms/step - loss: 0.4853 - accuracy: 0.8276 - val_loss: 1.0022 - val_accuracy: 0.6967
Epoch 10/90
31/31 [==============================] - 24s 786ms/step - loss: 0.4934 - accuracy: 0.8249 - val_loss: 1.0222 - val_accuracy: 0.6952
Epoch 11/90
31/31 [==============================] - 24s 784ms/step - loss: 0.4716 - accuracy: 0.8345 - val_loss: 0.9803 - val_accuracy: 0.7022
Epoch 12/90
31/31 [==============================] - 24s 783ms/step - loss: 0.4644 - accuracy: 0.8394 - val_loss: 0.9845 - val_accuracy: 0.7125
Epoch 13/90
31/31 [==============================] - 24s 788ms/step - loss: 0.4510 - accuracy: 0.8371 - val_loss: 0.9847 - val_accuracy: 0.6988
Epoch 14/90
31/31 [==============================] - 24s 786ms/step - loss: 0.4544 - accuracy: 0.8419 - val_loss: 1.0280 - val_accuracy: 0.6967
Epoch 15/90
31/31 [==============================] - 24s 784ms/step - loss: 0.4641 - accuracy: 0.8395 - val_loss: 1.0159 - val_accuracy: 0.6918
Epoch 16/90
31/31 [==============================] - 24s 792ms/step - loss: 0.4496 - accuracy: 0.8428 - val_loss: 1.0109 - val_accuracy: 0.7025
Epoch 17/90
31/31 [==============================] - 24s 787ms/step - loss: 0.4562 - accuracy: 0.8394 - val_loss: 1.0181 - val_accuracy: 0.7043
Epoch 18/90
31/31 [==============================] - 25s 794ms/step - loss: 0.4367 - accuracy: 0.8424 - val_loss: 1.0025 - val_accuracy: 0.6994
Epoch 19/90
31/31 [==============================] - 24s 789ms/step - loss: 0.4285 - accuracy: 0.8488 - val_loss: 0.9865 - val_accuracy: 0.7137
Epoch 20/90
31/31 [==============================] - 24s 790ms/step - loss: 0.4057 - accuracy: 0.8594 - val_loss: 1.0482 - val_accuracy: 0.6937
Epoch 21/90
31/31 [==============================] - 25s 794ms/step - loss: 0.4350 - accuracy: 0.8509 - val_loss: 1.0506 - val_accuracy: 0.6934
Epoch 22/90
31/31 [==============================] - 24s 788ms/step - loss: 0.4310 - accuracy: 0.8498 - val_loss: 0.9915 - val_accuracy: 0.7119
Epoch 23/90
31/31 [==============================] - 24s 791ms/step - loss: 0.4164 - accuracy: 0.8506 - val_loss: 1.0151 - val_accuracy: 0.7015
Epoch 24/90
31/31 [==============================] - 26s 836ms/step - loss: 0.4038 - accuracy: 0.8579 - val_loss: 1.0162 - val_accuracy: 0.7067
Epoch 25/90
31/31 [==============================] - 26s 838ms/step - loss: 0.4066 - accuracy: 0.8566 - val_loss: 1.0216 - val_accuracy: 0.6982
Epoch 26/90
31/31 [==============================] - 25s 816ms/step - loss: 0.3888 - accuracy: 0.8646 - val_loss: 1.0432 - val_accuracy: 0.6994
Epoch 27/90
31/31 [==============================] - 25s 808ms/step - loss: 0.3822 - accuracy: 0.8636 - val_loss: 1.0513 - val_accuracy: 0.6952
Epoch 28/90
31/31 [==============================] - 25s 805ms/step - loss: 0.3775 - accuracy: 0.8645 - val_loss: 1.0339 - val_accuracy: 0.7164
Epoch 29/90
31/31 [==============================] - 25s 821ms/step - loss: 0.3722 - accuracy: 0.8687 - val_loss: 1.0257 - val_accuracy: 0.7031
Epoch 30/90
31/31 [==============================] - 25s 812ms/step - loss: 0.3808 - accuracy: 0.8590 - val_loss: 1.0355 - val_accuracy: 0.7040
Epoch 31/90
31/31 [==============================] - 25s 817ms/step - loss: 0.3619 - accuracy: 0.8742 - val_loss: 1.0472 - val_accuracy: 0.7073
Epoch 32/90
31/31 [==============================] - 25s 807ms/step - loss: 0.3574 - accuracy: 0.8743 - val_loss: 1.0409 - val_accuracy: 0.7094
Epoch 33/90
31/31 [==============================] - 25s 811ms/step - loss: 0.3723 - accuracy: 0.8688 - val_loss: 1.0423 - val_accuracy: 0.7106
Epoch 34/90
31/31 [==============================] - 25s 815ms/step - loss: 0.3534 - accuracy: 0.8785 - val_loss: 1.0079 - val_accuracy: 0.7103
Epoch 35/90
31/31 [==============================] - 25s 799ms/step - loss: 0.3362 - accuracy: 0.8815 - val_loss: 1.0662 - val_accuracy: 0.7049
Epoch 36/90
31/31 [==============================] - 25s 805ms/step - loss: 0.3487 - accuracy: 0.8784 - val_loss: 1.0907 - val_accuracy: 0.6976
Epoch 37/90
31/31 [==============================] - 25s 798ms/step - loss: 0.3393 - accuracy: 0.8833 - val_loss: 1.0582 - val_accuracy: 0.7091
Epoch 38/90
31/31 [==============================] - 25s 804ms/step - loss: 0.3547 - accuracy: 0.8755 - val_loss: 1.0506 - val_accuracy: 0.7125
Epoch 39/90
31/31 [==============================] - 25s 806ms/step - loss: 0.3434 - accuracy: 0.8805 - val_loss: 1.0275 - val_accuracy: 0.7122
Epoch 40/90
31/31 [==============================] - 25s 796ms/step - loss: 0.3252 - accuracy: 0.8836 - val_loss: 1.0410 - val_accuracy: 0.7116
Epoch 41/90
31/31 [==============================] - 25s 811ms/step - loss: 0.3257 - accuracy: 0.8845 - val_loss: 1.0541 - val_accuracy: 0.7088
Epoch 42/90
31/31 [==============================] - 25s 809ms/step - loss: 0.3208 - accuracy: 0.8900 - val_loss: 1.0646 - val_accuracy: 0.7170
Epoch 43/90
31/31 [==============================] - 25s 818ms/step - loss: 0.3142 - accuracy: 0.8893 - val_loss: 1.0640 - val_accuracy: 0.7128
Epoch 44/90
31/31 [==============================] - 25s 811ms/step - loss: 0.3036 - accuracy: 0.8959 - val_loss: 1.0721 - val_accuracy: 0.7094
Epoch 45/90
31/31 [==============================] - 25s 805ms/step - loss: 0.3037 - accuracy: 0.8915 - val_loss: 1.0794 - val_accuracy: 0.7103
Epoch 46/90
31/31 [==============================] - 25s 810ms/step - loss: 0.2837 - accuracy: 0.9026 - val_loss: 1.0449 - val_accuracy: 0.7194
Epoch 47/90
31/31 [==============================] - 25s 807ms/step - loss: 0.2964 - accuracy: 0.8963 - val_loss: 1.0919 - val_accuracy: 0.7161
Epoch 48/90
31/31 [==============================] - 25s 817ms/step - loss: 0.2990 - accuracy: 0.8968 - val_loss: 1.0944 - val_accuracy: 0.7094
Epoch 49/90
31/31 [==============================] - 25s 812ms/step - loss: 0.3032 - accuracy: 0.8938 - val_loss: 1.1176 - val_accuracy: 0.7037
Epoch 50/90
31/31 [==============================] - 25s 815ms/step - loss: 0.3146 - accuracy: 0.8900 - val_loss: 1.0785 - val_accuracy: 0.7034
Epoch 51/90
31/31 [==============================] - 25s 804ms/step - loss: 0.2761 - accuracy: 0.9062 - val_loss: 1.0621 - val_accuracy: 0.7179
Epoch 52/90
31/31 [==============================] - 25s 810ms/step - loss: 0.2868 - accuracy: 0.9008 - val_loss: 1.1215 - val_accuracy: 0.7019
Epoch 53/90
31/31 [==============================] - 25s 807ms/step - loss: 0.2895 - accuracy: 0.8960 - val_loss: 1.1232 - val_accuracy: 0.7034
Epoch 54/90
31/31 [==============================] - 25s 796ms/step - loss: 0.2855 - accuracy: 0.9044 - val_loss: 1.0719 - val_accuracy: 0.7091
Epoch 55/90
31/31 [==============================] - 25s 795ms/step - loss: 0.2870 - accuracy: 0.9000 - val_loss: 1.0825 - val_accuracy: 0.7119
Epoch 56/90
31/31 [==============================] - 25s 797ms/step - loss: 0.2699 - accuracy: 0.9083 - val_loss: 1.1093 - val_accuracy: 0.7113
Epoch 57/90
31/31 [==============================] - 24s 785ms/step - loss: 0.2793 - accuracy: 0.9062 - val_loss: 1.0952 - val_accuracy: 0.7091
Epoch 58/90
31/31 [==============================] - 24s 785ms/step - loss: 0.2711 - accuracy: 0.9056 - val_loss: 1.0725 - val_accuracy: 0.7100
Epoch 59/90
31/31 [==============================] - 24s 786ms/step - loss: 0.2585 - accuracy: 0.9111 - val_loss: 1.1572 - val_accuracy: 0.7052
Epoch 60/90
31/31 [==============================] - 24s 791ms/step - loss: 0.2492 - accuracy: 0.9126 - val_loss: 1.1160 - val_accuracy: 0.7119
Epoch 61/90
31/31 [==============================] - 24s 781ms/step - loss: 0.2516 - accuracy: 0.9126 - val_loss: 1.0983 - val_accuracy: 0.7222
Epoch 62/90
31/31 [==============================] - 24s 790ms/step - loss: 0.2518 - accuracy: 0.9114 - val_loss: 1.0993 - val_accuracy: 0.7128
Epoch 63/90
31/31 [==============================] - 24s 787ms/step - loss: 0.2511 - accuracy: 0.9153 - val_loss: 1.1054 - val_accuracy: 0.7064
Epoch 64/90
31/31 [==============================] - 24s 789ms/step - loss: 0.2330 - accuracy: 0.9241 - val_loss: 1.1499 - val_accuracy: 0.7091
Epoch 65/90
31/31 [==============================] - 25s 803ms/step - loss: 0.2384 - accuracy: 0.9180 - val_loss: 1.1167 - val_accuracy: 0.7109
Epoch 66/90
31/31 [==============================] - 25s 808ms/step - loss: 0.2366 - accuracy: 0.9196 - val_loss: 1.0975 - val_accuracy: 0.7152
Epoch 67/90
31/31 [==============================] - 25s 805ms/step - loss: 0.2342 - accuracy: 0.9199 - val_loss: 1.1112 - val_accuracy: 0.7122
Epoch 68/90
31/31 [==============================] - 25s 811ms/step - loss: 0.2328 - accuracy: 0.9192 - val_loss: 1.1090 - val_accuracy: 0.7188
Epoch 69/90
31/31 [==============================] - 26s 830ms/step - loss: 0.2287 - accuracy: 0.9211 - val_loss: 1.1157 - val_accuracy: 0.7255
Epoch 70/90
31/31 [==============================] - 26s 825ms/step - loss: 0.2267 - accuracy: 0.9232 - val_loss: 1.1574 - val_accuracy: 0.7037
Epoch 71/90
31/31 [==============================] - 26s 834ms/step - loss: 0.2442 - accuracy: 0.9151 - val_loss: 1.1566 - val_accuracy: 0.7015
Epoch 72/90
31/31 [==============================] - 26s 838ms/step - loss: 0.2357 - accuracy: 0.9169 - val_loss: 1.1243 - val_accuracy: 0.7131
Epoch 73/90
31/31 [==============================] - 27s 857ms/step - loss: 0.2321 - accuracy: 0.9229 - val_loss: 1.1337 - val_accuracy: 0.7137
Epoch 74/90
31/31 [==============================] - 27s 860ms/step - loss: 0.2186 - accuracy: 0.9274 - val_loss: 1.1396 - val_accuracy: 0.7091
Epoch 75/90
31/31 [==============================] - 28s 896ms/step - loss: 0.2186 - accuracy: 0.9231 - val_loss: 1.1247 - val_accuracy: 0.7143
Epoch 76/90
31/31 [==============================] - 27s 888ms/step - loss: 0.2014 - accuracy: 0.9275 - val_loss: 1.1249 - val_accuracy: 0.7240
Epoch 77/90
31/31 [==============================] - 27s 877ms/step - loss: 0.2213 - accuracy: 0.9257 - val_loss: 1.1301 - val_accuracy: 0.7170
Epoch 78/90
31/31 [==============================] - 27s 865ms/step - loss: 0.2167 - accuracy: 0.9247 - val_loss: 1.1321 - val_accuracy: 0.7243
Epoch 79/90
31/31 [==============================] - 26s 854ms/step - loss: 0.2033 - accuracy: 0.9287 - val_loss: 1.1331 - val_accuracy: 0.7185
Epoch 80/90
31/31 [==============================] - 26s 840ms/step - loss: 0.2004 - accuracy: 0.9317 - val_loss: 1.1749 - val_accuracy: 0.7085
Epoch 81/90
31/31 [==============================] - 26s 845ms/step - loss: 0.2016 - accuracy: 0.9311 - val_loss: 1.2134 - val_accuracy: 0.7094
Epoch 82/90
31/31 [==============================] - 26s 841ms/step - loss: 0.2105 - accuracy: 0.9234 - val_loss: 1.1844 - val_accuracy: 0.7131
Epoch 83/90
31/31 [==============================] - 26s 857ms/step - loss: 0.2049 - accuracy: 0.9293 - val_loss: 1.1379 - val_accuracy: 0.7185
Epoch 84/90
31/31 [==============================] - 27s 868ms/step - loss: 0.2063 - accuracy: 0.9289 - val_loss: 1.1334 - val_accuracy: 0.7228
Epoch 85/90
31/31 [==============================] - 27s 858ms/step - loss: 0.2074 - accuracy: 0.9307 - val_loss: 1.1378 - val_accuracy: 0.7155
Epoch 86/90
31/31 [==============================] - 27s 870ms/step - loss: 0.2065 - accuracy: 0.9259 - val_loss: 1.1910 - val_accuracy: 0.7100
Epoch 87/90
31/31 [==============================] - 27s 859ms/step - loss: 0.1948 - accuracy: 0.9341 - val_loss: 1.1594 - val_accuracy: 0.7200
Epoch 88/90
31/31 [==============================] - 27s 857ms/step - loss: 0.1865 - accuracy: 0.9386 - val_loss: 1.1931 - val_accuracy: 0.7222
Epoch 89/90
31/31 [==============================] - 27s 867ms/step - loss: 0.1988 - accuracy: 0.9287 - val_loss: 1.1704 - val_accuracy: 0.7231
Epoch 90/90
31/31 [==============================] - 26s 855ms/step - loss: 0.1937 - accuracy: 0.9375 - val_loss: 1.1936 - val_accuracy: 0.7176
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 4s 39ms/step - loss: 1.1936 - accuracy: 0.7176
The test Loss is: 1.1936322450637817

The Best test Accuracy is: 71.76221013069153
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs')
Out[ ]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fbff9a0b390>
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     70.79
Precision    71.69
Recall       70.76
F1-Score     70.65
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')

Experiment7¶

In [ ]:
#Batch size-220
model_history=trainModel(model=model,epochs=120,optimizer='adam')
Epoch 1/120
31/31 [==============================] - 28s 854ms/step - loss: 0.2264 - accuracy: 0.9189 - val_loss: 1.1756 - val_accuracy: 0.7231
Epoch 2/120
31/31 [==============================] - 26s 844ms/step - loss: 0.1838 - accuracy: 0.9365 - val_loss: 1.1742 - val_accuracy: 0.7304
Epoch 3/120
31/31 [==============================] - 26s 841ms/step - loss: 0.1805 - accuracy: 0.9399 - val_loss: 1.1708 - val_accuracy: 0.7213
Epoch 4/120
31/31 [==============================] - 26s 841ms/step - loss: 0.1970 - accuracy: 0.9328 - val_loss: 1.2075 - val_accuracy: 0.7143
Epoch 5/120
31/31 [==============================] - 26s 835ms/step - loss: 0.1770 - accuracy: 0.9380 - val_loss: 1.2233 - val_accuracy: 0.7149
Epoch 6/120
31/31 [==============================] - 26s 826ms/step - loss: 0.1722 - accuracy: 0.9428 - val_loss: 1.1693 - val_accuracy: 0.7228
Epoch 7/120
31/31 [==============================] - 26s 835ms/step - loss: 0.1761 - accuracy: 0.9423 - val_loss: 1.1790 - val_accuracy: 0.7243
Epoch 8/120
31/31 [==============================] - 26s 834ms/step - loss: 0.1705 - accuracy: 0.9426 - val_loss: 1.2342 - val_accuracy: 0.7100
Epoch 9/120
31/31 [==============================] - 26s 834ms/step - loss: 0.1806 - accuracy: 0.9386 - val_loss: 1.1994 - val_accuracy: 0.7182
Epoch 10/120
31/31 [==============================] - 26s 842ms/step - loss: 0.1881 - accuracy: 0.9335 - val_loss: 1.1965 - val_accuracy: 0.7200
Epoch 11/120
31/31 [==============================] - 26s 848ms/step - loss: 0.1828 - accuracy: 0.9377 - val_loss: 1.1851 - val_accuracy: 0.7273
Epoch 12/120
31/31 [==============================] - 26s 846ms/step - loss: 0.1807 - accuracy: 0.9419 - val_loss: 1.2001 - val_accuracy: 0.7222
Epoch 13/120
31/31 [==============================] - 26s 829ms/step - loss: 0.1884 - accuracy: 0.9353 - val_loss: 1.1783 - val_accuracy: 0.7316
Epoch 14/120
31/31 [==============================] - 26s 836ms/step - loss: 0.1706 - accuracy: 0.9420 - val_loss: 1.1704 - val_accuracy: 0.7267
Epoch 15/120
31/31 [==============================] - 26s 852ms/step - loss: 0.1686 - accuracy: 0.9453 - val_loss: 1.1966 - val_accuracy: 0.7188
Epoch 16/120
31/31 [==============================] - 26s 852ms/step - loss: 0.1883 - accuracy: 0.9341 - val_loss: 1.1989 - val_accuracy: 0.7207
Epoch 17/120
31/31 [==============================] - 27s 861ms/step - loss: 0.1779 - accuracy: 0.9380 - val_loss: 1.1814 - val_accuracy: 0.7149
Epoch 18/120
31/31 [==============================] - 26s 855ms/step - loss: 0.1603 - accuracy: 0.9459 - val_loss: 1.2069 - val_accuracy: 0.7255
Epoch 19/120
31/31 [==============================] - 26s 855ms/step - loss: 0.1551 - accuracy: 0.9480 - val_loss: 1.2192 - val_accuracy: 0.7219
Epoch 20/120
31/31 [==============================] - 26s 851ms/step - loss: 0.1831 - accuracy: 0.9392 - val_loss: 1.2235 - val_accuracy: 0.7222
Epoch 21/120
31/31 [==============================] - 26s 847ms/step - loss: 0.1706 - accuracy: 0.9402 - val_loss: 1.1956 - val_accuracy: 0.7273
Epoch 22/120
31/31 [==============================] - 26s 840ms/step - loss: 0.1463 - accuracy: 0.9523 - val_loss: 1.2300 - val_accuracy: 0.7207
Epoch 23/120
31/31 [==============================] - 26s 838ms/step - loss: 0.1697 - accuracy: 0.9443 - val_loss: 1.2401 - val_accuracy: 0.7222
Epoch 24/120
31/31 [==============================] - 26s 845ms/step - loss: 0.1605 - accuracy: 0.9471 - val_loss: 1.2364 - val_accuracy: 0.7191
Epoch 25/120
31/31 [==============================] - 26s 836ms/step - loss: 0.1647 - accuracy: 0.9456 - val_loss: 1.2012 - val_accuracy: 0.7234
Epoch 26/120
31/31 [==============================] - 26s 851ms/step - loss: 0.1567 - accuracy: 0.9456 - val_loss: 1.2322 - val_accuracy: 0.7219
Epoch 27/120
31/31 [==============================] - 26s 841ms/step - loss: 0.1620 - accuracy: 0.9431 - val_loss: 1.2067 - val_accuracy: 0.7213
Epoch 28/120
31/31 [==============================] - 26s 834ms/step - loss: 0.1503 - accuracy: 0.9494 - val_loss: 1.2082 - val_accuracy: 0.7197
Epoch 29/120
31/31 [==============================] - 26s 840ms/step - loss: 0.1569 - accuracy: 0.9434 - val_loss: 1.2302 - val_accuracy: 0.7191
Epoch 30/120
31/31 [==============================] - 26s 839ms/step - loss: 0.1508 - accuracy: 0.9482 - val_loss: 1.2924 - val_accuracy: 0.7067
Epoch 31/120
31/31 [==============================] - 26s 826ms/step - loss: 0.1563 - accuracy: 0.9465 - val_loss: 1.2971 - val_accuracy: 0.7109
Epoch 32/120
31/31 [==============================] - 25s 818ms/step - loss: 0.1462 - accuracy: 0.9501 - val_loss: 1.2818 - val_accuracy: 0.7094
Epoch 33/120
31/31 [==============================] - 25s 821ms/step - loss: 0.1466 - accuracy: 0.9514 - val_loss: 1.2364 - val_accuracy: 0.7131
Epoch 34/120
31/31 [==============================] - 25s 820ms/step - loss: 0.1575 - accuracy: 0.9495 - val_loss: 1.2542 - val_accuracy: 0.7167
Epoch 35/120
31/31 [==============================] - 25s 825ms/step - loss: 0.1374 - accuracy: 0.9529 - val_loss: 1.2250 - val_accuracy: 0.7246
Epoch 36/120
31/31 [==============================] - 25s 825ms/step - loss: 0.1399 - accuracy: 0.9547 - val_loss: 1.3283 - val_accuracy: 0.7082
Epoch 37/120
31/31 [==============================] - 25s 818ms/step - loss: 0.1403 - accuracy: 0.9562 - val_loss: 1.2692 - val_accuracy: 0.7194
Epoch 38/120
31/31 [==============================] - 25s 824ms/step - loss: 0.1404 - accuracy: 0.9564 - val_loss: 1.2001 - val_accuracy: 0.7346
Epoch 39/120
31/31 [==============================] - 25s 822ms/step - loss: 0.1349 - accuracy: 0.9546 - val_loss: 1.2088 - val_accuracy: 0.7322
Epoch 40/120
31/31 [==============================] - 25s 822ms/step - loss: 0.1393 - accuracy: 0.9541 - val_loss: 1.2276 - val_accuracy: 0.7204
Epoch 41/120
31/31 [==============================] - 25s 822ms/step - loss: 0.1370 - accuracy: 0.9555 - val_loss: 1.2278 - val_accuracy: 0.7304
Epoch 42/120
31/31 [==============================] - 25s 810ms/step - loss: 0.1323 - accuracy: 0.9577 - val_loss: 1.2480 - val_accuracy: 0.7279
Epoch 43/120
31/31 [==============================] - 25s 798ms/step - loss: 0.1310 - accuracy: 0.9585 - val_loss: 1.2548 - val_accuracy: 0.7216
Epoch 44/120
31/31 [==============================] - 25s 813ms/step - loss: 0.1305 - accuracy: 0.9573 - val_loss: 1.2455 - val_accuracy: 0.7261
Epoch 45/120
31/31 [==============================] - 25s 820ms/step - loss: 0.1452 - accuracy: 0.9491 - val_loss: 1.2201 - val_accuracy: 0.7243
Epoch 46/120
31/31 [==============================] - 25s 817ms/step - loss: 0.1444 - accuracy: 0.9526 - val_loss: 1.2656 - val_accuracy: 0.7210
Epoch 47/120
31/31 [==============================] - 25s 796ms/step - loss: 0.1371 - accuracy: 0.9538 - val_loss: 1.2865 - val_accuracy: 0.7188
Epoch 48/120
31/31 [==============================] - 25s 805ms/step - loss: 0.1416 - accuracy: 0.9525 - val_loss: 1.2408 - val_accuracy: 0.7276
Epoch 49/120
31/31 [==============================] - 25s 808ms/step - loss: 0.1416 - accuracy: 0.9534 - val_loss: 1.3074 - val_accuracy: 0.7170
Epoch 50/120
31/31 [==============================] - 25s 810ms/step - loss: 0.1397 - accuracy: 0.9505 - val_loss: 1.2379 - val_accuracy: 0.7295
Epoch 51/120
31/31 [==============================] - 25s 805ms/step - loss: 0.1292 - accuracy: 0.9555 - val_loss: 1.2751 - val_accuracy: 0.7249
Epoch 52/120
31/31 [==============================] - 25s 809ms/step - loss: 0.1386 - accuracy: 0.9535 - val_loss: 1.2526 - val_accuracy: 0.7295
Epoch 53/120
31/31 [==============================] - 25s 805ms/step - loss: 0.1227 - accuracy: 0.9589 - val_loss: 1.3191 - val_accuracy: 0.7234
Epoch 54/120
31/31 [==============================] - 25s 807ms/step - loss: 0.1227 - accuracy: 0.9606 - val_loss: 1.2560 - val_accuracy: 0.7240
Epoch 55/120
31/31 [==============================] - 24s 789ms/step - loss: 0.1351 - accuracy: 0.9540 - val_loss: 1.2647 - val_accuracy: 0.7228
Epoch 56/120
31/31 [==============================] - 24s 781ms/step - loss: 0.1244 - accuracy: 0.9597 - val_loss: 1.2542 - val_accuracy: 0.7276
Epoch 57/120
31/31 [==============================] - 24s 775ms/step - loss: 0.1204 - accuracy: 0.9623 - val_loss: 1.2490 - val_accuracy: 0.7310
Epoch 58/120
31/31 [==============================] - 24s 783ms/step - loss: 0.1174 - accuracy: 0.9589 - val_loss: 1.2910 - val_accuracy: 0.7231
Epoch 59/120
31/31 [==============================] - 24s 787ms/step - loss: 0.1065 - accuracy: 0.9665 - val_loss: 1.2907 - val_accuracy: 0.7282
Epoch 60/120
31/31 [==============================] - 24s 778ms/step - loss: 0.1153 - accuracy: 0.9626 - val_loss: 1.2913 - val_accuracy: 0.7298
Epoch 61/120
31/31 [==============================] - 24s 775ms/step - loss: 0.1120 - accuracy: 0.9638 - val_loss: 1.2943 - val_accuracy: 0.7237
Epoch 62/120
31/31 [==============================] - 24s 780ms/step - loss: 0.1227 - accuracy: 0.9625 - val_loss: 1.3151 - val_accuracy: 0.7249
Epoch 63/120
31/31 [==============================] - 24s 787ms/step - loss: 0.1306 - accuracy: 0.9574 - val_loss: 1.2822 - val_accuracy: 0.7291
Epoch 64/120
31/31 [==============================] - 25s 799ms/step - loss: 0.1304 - accuracy: 0.9544 - val_loss: 1.2323 - val_accuracy: 0.7346
Epoch 65/120
31/31 [==============================] - 25s 815ms/step - loss: 0.1142 - accuracy: 0.9612 - val_loss: 1.2808 - val_accuracy: 0.7222
Epoch 66/120
31/31 [==============================] - 25s 822ms/step - loss: 0.1203 - accuracy: 0.9580 - val_loss: 1.2883 - val_accuracy: 0.7185
Epoch 67/120
31/31 [==============================] - 25s 806ms/step - loss: 0.1119 - accuracy: 0.9628 - val_loss: 1.2579 - val_accuracy: 0.7255
Epoch 68/120
31/31 [==============================] - 25s 805ms/step - loss: 0.1054 - accuracy: 0.9641 - val_loss: 1.2802 - val_accuracy: 0.7316
Epoch 69/120
31/31 [==============================] - 25s 809ms/step - loss: 0.1141 - accuracy: 0.9632 - val_loss: 1.2662 - val_accuracy: 0.7249
Epoch 70/120
31/31 [==============================] - 25s 811ms/step - loss: 0.1283 - accuracy: 0.9588 - val_loss: 1.2953 - val_accuracy: 0.7267
Epoch 71/120
31/31 [==============================] - 25s 812ms/step - loss: 0.1286 - accuracy: 0.9573 - val_loss: 1.2955 - val_accuracy: 0.7182
Epoch 72/120
31/31 [==============================] - 25s 812ms/step - loss: 0.1245 - accuracy: 0.9585 - val_loss: 1.2770 - val_accuracy: 0.7252
Epoch 73/120
31/31 [==============================] - 25s 804ms/step - loss: 0.1123 - accuracy: 0.9619 - val_loss: 1.2185 - val_accuracy: 0.7352
Epoch 74/120
31/31 [==============================] - 25s 799ms/step - loss: 0.1111 - accuracy: 0.9628 - val_loss: 1.2939 - val_accuracy: 0.7301
Epoch 75/120
31/31 [==============================] - 25s 794ms/step - loss: 0.1157 - accuracy: 0.9589 - val_loss: 1.2848 - val_accuracy: 0.7249
Epoch 76/120
31/31 [==============================] - 24s 786ms/step - loss: 0.1272 - accuracy: 0.9600 - val_loss: 1.4034 - val_accuracy: 0.7055
Epoch 77/120
31/31 [==============================] - 24s 787ms/step - loss: 0.1130 - accuracy: 0.9643 - val_loss: 1.2689 - val_accuracy: 0.7288
Epoch 78/120
31/31 [==============================] - 25s 796ms/step - loss: 0.1139 - accuracy: 0.9632 - val_loss: 1.2868 - val_accuracy: 0.7319
Epoch 79/120
31/31 [==============================] - 24s 790ms/step - loss: 0.1081 - accuracy: 0.9664 - val_loss: 1.2589 - val_accuracy: 0.7367
Epoch 80/120
31/31 [==============================] - 24s 792ms/step - loss: 0.1173 - accuracy: 0.9601 - val_loss: 1.2524 - val_accuracy: 0.7370
Epoch 81/120
31/31 [==============================] - 24s 790ms/step - loss: 0.0954 - accuracy: 0.9698 - val_loss: 1.3084 - val_accuracy: 0.7316
Epoch 82/120
31/31 [==============================] - 24s 786ms/step - loss: 0.1131 - accuracy: 0.9659 - val_loss: 1.2946 - val_accuracy: 0.7364
Epoch 83/120
31/31 [==============================] - 24s 789ms/step - loss: 0.1016 - accuracy: 0.9677 - val_loss: 1.3048 - val_accuracy: 0.7243
Epoch 84/120
31/31 [==============================] - 25s 795ms/step - loss: 0.1054 - accuracy: 0.9647 - val_loss: 1.2448 - val_accuracy: 0.7331
Epoch 85/120
31/31 [==============================] - 24s 777ms/step - loss: 0.1193 - accuracy: 0.9585 - val_loss: 1.3090 - val_accuracy: 0.7264
Epoch 86/120
31/31 [==============================] - 24s 779ms/step - loss: 0.1080 - accuracy: 0.9677 - val_loss: 1.3062 - val_accuracy: 0.7307
Epoch 87/120
31/31 [==============================] - 24s 779ms/step - loss: 0.1145 - accuracy: 0.9609 - val_loss: 1.3153 - val_accuracy: 0.7249
Epoch 88/120
31/31 [==============================] - 24s 791ms/step - loss: 0.1139 - accuracy: 0.9635 - val_loss: 1.2657 - val_accuracy: 0.7319
Epoch 89/120
31/31 [==============================] - 24s 785ms/step - loss: 0.1105 - accuracy: 0.9616 - val_loss: 1.2638 - val_accuracy: 0.7358
Epoch 90/120
31/31 [==============================] - 24s 792ms/step - loss: 0.1060 - accuracy: 0.9656 - val_loss: 1.2903 - val_accuracy: 0.7307
Epoch 91/120
31/31 [==============================] - 24s 780ms/step - loss: 0.1115 - accuracy: 0.9594 - val_loss: 1.3693 - val_accuracy: 0.7210
Epoch 92/120
31/31 [==============================] - 24s 768ms/step - loss: 0.1080 - accuracy: 0.9646 - val_loss: 1.3287 - val_accuracy: 0.7194
Epoch 93/120
31/31 [==============================] - 24s 771ms/step - loss: 0.1008 - accuracy: 0.9680 - val_loss: 1.3775 - val_accuracy: 0.7143
Epoch 94/120
31/31 [==============================] - 24s 775ms/step - loss: 0.1204 - accuracy: 0.9594 - val_loss: 1.2982 - val_accuracy: 0.7258
Epoch 95/120
31/31 [==============================] - 24s 773ms/step - loss: 0.1090 - accuracy: 0.9632 - val_loss: 1.2630 - val_accuracy: 0.7355
Epoch 96/120
31/31 [==============================] - 24s 773ms/step - loss: 0.1015 - accuracy: 0.9674 - val_loss: 1.2966 - val_accuracy: 0.7298
Epoch 97/120
31/31 [==============================] - 24s 765ms/step - loss: 0.0981 - accuracy: 0.9662 - val_loss: 1.3311 - val_accuracy: 0.7291
Epoch 98/120
31/31 [==============================] - 24s 776ms/step - loss: 0.0884 - accuracy: 0.9721 - val_loss: 1.3187 - val_accuracy: 0.7310
Epoch 99/120
31/31 [==============================] - 24s 768ms/step - loss: 0.1058 - accuracy: 0.9686 - val_loss: 1.3309 - val_accuracy: 0.7234
Epoch 100/120
31/31 [==============================] - 24s 769ms/step - loss: 0.0874 - accuracy: 0.9709 - val_loss: 1.3834 - val_accuracy: 0.7176
Epoch 101/120
31/31 [==============================] - 24s 768ms/step - loss: 0.0953 - accuracy: 0.9686 - val_loss: 1.3373 - val_accuracy: 0.7337
Epoch 102/120
31/31 [==============================] - 24s 760ms/step - loss: 0.0950 - accuracy: 0.9680 - val_loss: 1.3445 - val_accuracy: 0.7295
Epoch 103/120
31/31 [==============================] - 24s 764ms/step - loss: 0.0939 - accuracy: 0.9700 - val_loss: 1.3362 - val_accuracy: 0.7255
Epoch 104/120
31/31 [==============================] - 24s 770ms/step - loss: 0.0888 - accuracy: 0.9715 - val_loss: 1.3337 - val_accuracy: 0.7246
Epoch 105/120
31/31 [==============================] - 24s 765ms/step - loss: 0.0966 - accuracy: 0.9689 - val_loss: 1.3452 - val_accuracy: 0.7291
Epoch 106/120
31/31 [==============================] - 23s 760ms/step - loss: 0.0958 - accuracy: 0.9682 - val_loss: 1.3566 - val_accuracy: 0.7298
Epoch 107/120
31/31 [==============================] - 23s 758ms/step - loss: 0.1022 - accuracy: 0.9640 - val_loss: 1.3120 - val_accuracy: 0.7264
Epoch 108/120
31/31 [==============================] - 24s 760ms/step - loss: 0.1010 - accuracy: 0.9659 - val_loss: 1.3058 - val_accuracy: 0.7376
Epoch 109/120
31/31 [==============================] - 24s 764ms/step - loss: 0.0981 - accuracy: 0.9664 - val_loss: 1.3446 - val_accuracy: 0.7291
Epoch 110/120
31/31 [==============================] - 23s 757ms/step - loss: 0.0883 - accuracy: 0.9701 - val_loss: 1.3963 - val_accuracy: 0.7176
Epoch 111/120
31/31 [==============================] - 24s 763ms/step - loss: 0.0975 - accuracy: 0.9686 - val_loss: 1.3859 - val_accuracy: 0.7234
Epoch 112/120
31/31 [==============================] - 24s 762ms/step - loss: 0.0935 - accuracy: 0.9689 - val_loss: 1.3096 - val_accuracy: 0.7270
Epoch 113/120
31/31 [==============================] - 23s 758ms/step - loss: 0.0884 - accuracy: 0.9718 - val_loss: 1.3689 - val_accuracy: 0.7176
Epoch 114/120
31/31 [==============================] - 24s 762ms/step - loss: 0.1125 - accuracy: 0.9616 - val_loss: 1.2905 - val_accuracy: 0.7413
Epoch 115/120
31/31 [==============================] - 24s 770ms/step - loss: 0.1047 - accuracy: 0.9653 - val_loss: 1.3230 - val_accuracy: 0.7382
Epoch 116/120
31/31 [==============================] - 24s 774ms/step - loss: 0.0987 - accuracy: 0.9670 - val_loss: 1.3490 - val_accuracy: 0.7282
Epoch 117/120
31/31 [==============================] - 24s 765ms/step - loss: 0.0889 - accuracy: 0.9706 - val_loss: 1.3818 - val_accuracy: 0.7185
Epoch 118/120
31/31 [==============================] - 24s 766ms/step - loss: 0.0863 - accuracy: 0.9725 - val_loss: 1.3508 - val_accuracy: 0.7355
Epoch 119/120
31/31 [==============================] - 23s 758ms/step - loss: 0.0877 - accuracy: 0.9721 - val_loss: 1.3684 - val_accuracy: 0.7270
Epoch 120/120
31/31 [==============================] - 23s 757ms/step - loss: 0.0870 - accuracy: 0.9719 - val_loss: 1.3816 - val_accuracy: 0.7328
In [ ]:
tset_loss,accuracy=model.evaluate(X_test,y_test)
print("The test Loss is:",tset_loss)
print("\nThe Best test Accuracy is:",accuracy*100)
104/104 [==============================] - 4s 35ms/step - loss: 1.3816 - accuracy: 0.7328
The test Loss is: 1.3816475868225098

The Best test Accuracy is: 73.27873706817627
In [ ]:
pd.DataFrame(model_history.history).plot(figsize=(12,7),xlabel='Epochs')
Out[ ]:
<matplotlib.axes._subplots.AxesSubplot at 0x7fbffd136390>
In [ ]:
from sklearn.metrics import classification_report,confusion_matrix
import seaborn as sns
y_pred=tf.round(model.predict(X_test))
y_pred=np.argmax(y_pred, axis=1)
conf_mat=confusion_matrix(y_test,y_pred)
fig,ax=plt.subplots(figsize=(14,7))
ax=sns.heatmap(conf_mat,cmap='coolwarm',annot=True)
In [ ]:
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
def evaluate_preds(y_test,y_pred):
  
  accuracy=accuracy_score(y_test,y_pred)
  precision=precision_score(y_test,y_pred,average='macro')
  recall=recall_score(y_test,y_pred,average='macro')
  f1=f1_score(y_test,y_pred,average='macro')
  metric_dict={'Accuracy':round(accuracy*100,2),
      "Precision":round(precision*100,2),
      "Recall":round(recall*100,2),
      "F1-Score":round(f1*100,2)}
  return metric_dict

performance_mat=evaluate_preds(y_test,y_pred)
perf_mat_series=pd.Series(performance_mat)
perf_mat_series
Out[ ]:
Accuracy     72.40
Precision    73.00
Recall       72.33
F1-Score     72.21
dtype: float64
In [ ]:
fig,ax=plt.subplots(figsize=(12,7))
ax=perf_mat_series.plot(kind='bar')
In [ ]:

In [ ]:

In [ ]: